From 7f01f7d16230fe011a3f52db9e477a958796b202 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Wed, 24 Apr 2024 16:10:55 +0100 Subject: [PATCH 001/201] chore: Reset noir-gates-diff report on master (#6003) Similarly to https://github.com/noir-lang/noir/pull/4878 the gates diff report got corrupted following changes to `nargo info`. This PR simply resets the gates diff so it is expected to see really large gates differences. You can look into the Noir PR for more info. There will be a followup which sets back to the correct noir-gates-diff commit. Without this reset we will not be able to get accurate gate diffs based off of master. --- .github/workflows/protocol-circuits-gate-diff.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/protocol-circuits-gate-diff.yml b/.github/workflows/protocol-circuits-gate-diff.yml index 0840e67449b..f7b61ff3c90 100644 --- a/.github/workflows/protocol-circuits-gate-diff.yml +++ b/.github/workflows/protocol-circuits-gate-diff.yml @@ -90,7 +90,7 @@ jobs: - name: Compare gates reports id: gates_diff - uses: TomAFrench/noir-gates-diff@df05f34e2ab275ddc4f2cac065df1c88f8a05e5d + uses: vezenovm/noir-gates-diff@45e9c9a21deb236fa7f38138b42b33ddaf7c0985 with: report: protocol_circuits_report.json summaryQuantile: 0 # Display any diff in gate count From d8e5af4eb023f68140d8cebd39d1d15b4683a4a3 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Wed, 24 Apr 2024 16:28:09 +0100 Subject: [PATCH 002/201] fix: Use correct gates diff commit now that master has been reset (#6004) Follow-up to https://github.com/AztecProtocol/aztec-packages/pull/6003. This PR simply updates to using a noir gates diff that expects an uncorrupted comparison report on master. --- .github/workflows/protocol-circuits-gate-diff.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/protocol-circuits-gate-diff.yml b/.github/workflows/protocol-circuits-gate-diff.yml index f7b61ff3c90..b31e371d471 100644 --- a/.github/workflows/protocol-circuits-gate-diff.yml +++ b/.github/workflows/protocol-circuits-gate-diff.yml @@ -90,7 +90,7 @@ jobs: - name: Compare gates reports id: gates_diff - uses: vezenovm/noir-gates-diff@45e9c9a21deb236fa7f38138b42b33ddaf7c0985 + uses: vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 with: report: protocol_circuits_report.json summaryQuantile: 0 # Display any diff in gate count From 067e4607019c17dad7c3861734c4bee0e849fbad Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 24 Apr 2024 10:52:08 -0500 Subject: [PATCH 003/201] chore(ci): hotfix runners not starting --- .github/workflows/setup-runner.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/setup-runner.yml b/.github/workflows/setup-runner.yml index 80d3bd59056..b3685eeea80 100644 --- a/.github/workflows/setup-runner.yml +++ b/.github/workflows/setup-runner.yml @@ -58,7 +58,7 @@ jobs: group: start-builder-${{ inputs.runner_label }} steps: - name: Start EC2 runner - uses: AztecProtocol/ec2-action-builder@v0.14e + uses: AztecProtocol/ec2-action-builder@v0.15 with: github_token: ${{ secrets.GH_SELF_HOSTED_RUNNER_TOKEN }} aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} From b30d0b6481b0f0b2241f1fcc9ec9bc0f82308ce9 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Wed, 24 Apr 2024 17:06:28 +0100 Subject: [PATCH 004/201] fix: refuse to start sequencer without a prover (#6000) Avoids a bad situation where a sequencer node produces empty blocks that fail to validate in the rollup contract. 
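As a minimal sketch, the guard amounts to one startup-time configuration
check (using the `nodeConfig` flag names from the diff below):

    // Refuse the invalid combination up front: a sequencer without a prover
    // would publish blocks whose proofs can never be produced, and the rollup
    // contract would reject them.
    if (!nodeConfig.disableSequencer && nodeConfig.disableProver) {
      throw new Error('Cannot run a sequencer without a prover');
    }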
--- yarn-project/aztec/src/cli/cmds/start_node.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/yarn-project/aztec/src/cli/cmds/start_node.ts b/yarn-project/aztec/src/cli/cmds/start_node.ts index 3b29bb8ee4b..63a5ab7aa9b 100644 --- a/yarn-project/aztec/src/cli/cmds/start_node.ts +++ b/yarn-project/aztec/src/cli/cmds/start_node.ts @@ -67,6 +67,10 @@ export const startNode = async ( nodeConfig.disableProver = true; } + if (!nodeConfig.disableSequencer && nodeConfig.disableProver) { + throw new Error('Cannot run a sequencer without a prover'); + } + // Create and start Aztec Node. const node = await createAztecNode(nodeConfig); const nodeServer = createAztecNodeRpcServer(node); From 0c712b9c0f69bad0da3910add5adba40622d3cea Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Wed, 24 Apr 2024 17:09:33 +0100 Subject: [PATCH 005/201] feat: serialize public kernel private inputs (#5971) Serialize `PublicKernelCircuitPrivateInputs` to and from strings --- ...blic_kernel_circuit_private_inputs.test.ts | 6 ++++ .../public_kernel_circuit_private_inputs.ts | 30 +++++++++++++++++++ 2 files changed, 36 insertions(+) diff --git a/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.test.ts b/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.test.ts index 0240c1b081c..9f783f1a305 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.test.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.test.ts @@ -14,4 +14,10 @@ describe('PublicKernelCircuitPrivateInputs', () => { expect(original).toEqual(serialized); expect(original).not.toBe(serialized); }); + + it('serializes to and deserializes from a string', () => { + const original = makePublicKernelCircuitPrivateInputs(123); + const serialized = PublicKernelCircuitPrivateInputs.fromString(original.toString()); + expect(original).toEqual(serialized); + }); }); diff --git a/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.ts b/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.ts index 63f377cd63c..190593832db 100644 --- a/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.ts +++ b/yarn-project/circuits.js/src/structs/kernel/public_kernel_circuit_private_inputs.ts @@ -18,10 +18,27 @@ export class PublicKernelCircuitPrivateInputs { public readonly publicCall: PublicCallData, ) {} + /** + * Serializes the object to a buffer. + * @returns - Buffer representation of the object. + */ toBuffer() { return serializeToBuffer(this.previousKernel, this.publicCall); } + /** + * Serializes the object to a hex string. + * @returns - Hex string representation of the object. + */ + toString() { + return this.toBuffer().toString('hex'); + } + + /** + * Deserializes the object from a buffer. + * @param buffer - Buffer to deserialize. + * @returns - Deserialized object. + */ static fromBuffer(buffer: BufferReader | Buffer) { const reader = BufferReader.asReader(buffer); const previousKernel = reader.readObject(PublicKernelData); @@ -29,6 +46,19 @@ export class PublicKernelCircuitPrivateInputs { return new PublicKernelCircuitPrivateInputs(previousKernel, publicCall); } + /** + * Deserializes the object from a hex string. + * @param str - Hex string to deserialize. + * @returns - Deserialized object. + */ + static fromString(str: string) { + return PublicKernelCircuitPrivateInputs.fromBuffer(Buffer.from(str, 'hex')); + } + + /** + * Clones the object. 
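+   * (Implemented as a serialization round-trip, so the clone shares no mutable state with the original.)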
+ * @returns - Cloned object. + */ clone() { return PublicKernelCircuitPrivateInputs.fromBuffer(this.toBuffer()); } From 88ee0af9987063d63afb49c4f61ab5ae5f7c1b73 Mon Sep 17 00:00:00 2001 From: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> Date: Wed, 24 Apr 2024 17:35:07 +0100 Subject: [PATCH 006/201] feat: add the storage layout to the contract artifact (#5952) Fixes #5947 and includes the notes as well. The notes was part of the contract object, but not directly exposed on the artifact itself. Needed to fix a few additional things to make it work: - The `deploy_contract` function was broken (old call flow), it cannot do the new flow because that would be a circular dependency. - Using the `storageLayout` values to get the `storageSlot` that we are using in multiple tests - Using the `notes` values to get the `noteTypeId` that we are using in multiple tests - Removed `lodash.uniqby` as the dependency was not needed after this. --- yarn-project/aztec.js/src/api/abi.ts | 2 +- .../aztec.js/src/contract/contract.test.ts | 2 + .../aztec.js/src/contract/contract_base.ts | 38 +++------- yarn-project/builder/package.json | 2 - .../src/contract-interface-gen/typescript.ts | 71 ++++++++----------- yarn-project/circuit-types/src/mocks.ts | 2 + .../src/contract/artifact_hash.test.ts | 2 + .../src/e2e_account_init_fees.test.ts | 4 +- .../end-to-end/src/sample-dapp/index.mjs | 4 +- .../end-to-end/src/sample-dapp/index.test.mjs | 4 +- yarn-project/end-to-end/src/shared/browser.ts | 10 +-- yarn-project/foundation/src/abi/abi.ts | 37 ++++++++++ .../p2p/src/service/discv5_service.test.ts | 3 + .../src/client/private_execution.test.ts | 28 +++++--- .../simulator/src/client/simulator.test.ts | 4 +- .../simulator/src/public/index.test.ts | 2 +- .../types/src/abi/contract_artifact.test.ts | 1 - .../types/src/abi/contract_artifact.ts | 65 +++++++++++++++++ yarn-project/yarn.lock | 18 ----- 19 files changed, 182 insertions(+), 117 deletions(-) diff --git a/yarn-project/aztec.js/src/api/abi.ts b/yarn-project/aztec.js/src/api/abi.ts index 7e94b7eb30f..03a5b41d4f2 100644 --- a/yarn-project/aztec.js/src/api/abi.ts +++ b/yarn-project/aztec.js/src/api/abi.ts @@ -1,3 +1,3 @@ export { ContractArtifact, FunctionArtifact, FunctionSelector } from '@aztec/foundation/abi'; -export { loadContractArtifact } from '@aztec/types/abi'; +export { loadContractArtifact, contractArtifactToBuffer, contractArtifactFromBuffer } from '@aztec/types/abi'; export { NoirCompiledContract } from '@aztec/types/noir'; diff --git a/yarn-project/aztec.js/src/contract/contract.test.ts b/yarn-project/aztec.js/src/contract/contract.test.ts index af425afcc90..bcdac6b8dc9 100644 --- a/yarn-project/aztec.js/src/contract/contract.test.ts +++ b/yarn-project/aztec.js/src/contract/contract.test.ts @@ -105,6 +105,8 @@ describe('Contract Class', () => { globals: {}, }, fileMap: {}, + storageLayout: {}, + notes: {}, }; beforeEach(() => { diff --git a/yarn-project/aztec.js/src/contract/contract_base.ts b/yarn-project/aztec.js/src/contract/contract_base.ts index 5758973849e..0ec2240a404 100644 --- a/yarn-project/aztec.js/src/contract/contract_base.ts +++ b/yarn-project/aztec.js/src/contract/contract_base.ts @@ -1,5 +1,11 @@ -import { type Fr, computePartialAddress } from '@aztec/circuits.js'; -import { type ContractArtifact, type FunctionArtifact, FunctionSelector } from '@aztec/foundation/abi'; +import { computePartialAddress } from '@aztec/circuits.js'; +import { + type ContractArtifact, + type ContractNote, + type FieldLayout, + type 
FunctionArtifact, + FunctionSelector, +} from '@aztec/foundation/abi'; import { type ContractInstanceWithAddress } from '@aztec/types/contracts'; import { type Wallet } from '../account/index.js'; @@ -16,34 +22,6 @@ export type ContractMethod = ((...args: any[]) => ContractFunctionInteraction) & readonly selector: FunctionSelector; }; -/** - * Type representing a field layout in the storage of a contract. - */ -type FieldLayout = { - /** - * Slot in which the field is stored. - */ - slot: Fr; - /** - * Type being stored at the slot - */ - typ: string; -}; - -/** - * Type representing a note in use in the contract. - */ -type ContractNote = { - /** - * Note identifier - */ - id: Fr; - /** - * Type of the note - */ - typ: string; -}; - /** * Type representing the storage layout of a contract. */ diff --git a/yarn-project/builder/package.json b/yarn-project/builder/package.json index 5f87dd34488..cac014a948c 100644 --- a/yarn-project/builder/package.json +++ b/yarn-project/builder/package.json @@ -58,7 +58,6 @@ "fs-extra": "^11.1.1", "lodash.camelcase": "^4.3.0", "lodash.capitalize": "^4.2.1", - "lodash.uniqby": "^4.7.0", "memfs": "^4.6.0", "pako": "^2.1.0", "semver": "^7.5.4", @@ -71,7 +70,6 @@ "@types/jest": "^29.5.0", "@types/lodash.camelcase": "^4.3.7", "@types/lodash.capitalize": "^4.2.7", - "@types/lodash.uniqby": "^4.7.9", "@types/node": "^18.7.23", "@types/pako": "^2.0.0", "@types/semver": "^7.5.4", diff --git a/yarn-project/builder/src/contract-interface-gen/typescript.ts b/yarn-project/builder/src/contract-interface-gen/typescript.ts index b86a32dcbdb..4c2911fba97 100644 --- a/yarn-project/builder/src/contract-interface-gen/typescript.ts +++ b/yarn-project/builder/src/contract-interface-gen/typescript.ts @@ -1,12 +1,7 @@ import { type ABIParameter, - type BasicValue, type ContractArtifact, type FunctionArtifact, - type IntegerValue, - type StructValue, - type TupleValue, - type TypedStructFieldValue, getDefaultInitializer, isAztecAddressStruct, isEthAddressStruct, @@ -14,8 +9,6 @@ import { isWrappedFieldStruct, } from '@aztec/foundation/abi'; -import uniqBy from 'lodash.uniqby'; - /** * Returns the corresponding typescript type for a given Noir type. * @param type - The input Noir type. @@ -192,33 +185,29 @@ function generateAbiStatement(name: string, artifactImportPath: string) { * @param input - The contract artifact. */ function generateStorageLayoutGetter(input: ContractArtifact) { - const storage = input.outputs.globals.storage ? (input.outputs.globals.storage[0] as StructValue) : { fields: [] }; - const storageFields = storage.fields as TypedStructFieldValue[]; - const storageFieldsUnionType = storageFields.map(f => `'${f.name}'`).join(' | '); - const layout = storageFields + const entries = Object.entries(input.storageLayout); + + if (entries.length === 0) { + return ''; + } + + const storageFieldsUnionType = entries.map(([name]) => `'${name}'`).join(' | '); + const layout = entries .map( - ({ - name, - value: { - fields: [slot, typ], - }, - }) => + ([name, { slot, typ }]) => `${name}: { - slot: new Fr(${(slot.value as IntegerValue).value}n), - typ: "${(typ.value as BasicValue<'string', string>).value}", - } - `, + slot: new Fr(${slot.toBigInt()}n), + typ: "${typ}", + }`, ) .join(',\n'); - return storageFields.length > 0 - ? 
` - public static get storage(): ContractStorageLayout<${storageFieldsUnionType}> { + + return `public static get storage(): ContractStorageLayout<${storageFieldsUnionType}> { return { ${layout} } as ContractStorageLayout<${storageFieldsUnionType}>; } - ` - : ''; + `; } /** @@ -226,30 +215,28 @@ function generateStorageLayoutGetter(input: ContractArtifact) { * @param input - The contract artifact. */ function generateNotesGetter(input: ContractArtifact) { - const notes = input.outputs.globals.notes - ? uniqBy(input.outputs.globals.notes as TupleValue[], n => (n.fields[1] as BasicValue<'string', string>).value) - : []; - const notesUnionType = notes.map(n => `'${(n.fields[1] as BasicValue<'string', string>).value}'`).join(' | '); + const entries = Object.entries(input.notes); - const noteMetadata = notes + if (entries.length === 0) { + return ''; + } + + const notesUnionType = entries.map(([name]) => `'${name}'`).join(' | '); + const noteMetadata = entries .map( - ({ fields: [id, typ] }) => - `${(typ as BasicValue<'string', string>).value}: { - id: new Fr(${(id as IntegerValue).value}n), - } - `, + ([name, { id }]) => + `${name}: { + id: new Fr(${id.toBigInt()}n), + }`, ) .join(',\n'); - return notes.length > 0 - ? ` - public static get notes(): ContractNotes<${notesUnionType}> { - const notes = this.artifact.outputs.globals.notes ? (this.artifact.outputs.globals.notes as any) : []; + + return `public static get notes(): ContractNotes<${notesUnionType}> { return { ${noteMetadata} } as ContractNotes<${notesUnionType}>; } - ` - : ''; + `; } /** diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index 7f0442ffd3e..9cc201d2feb 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -125,6 +125,8 @@ export const randomContractArtifact = (): ContractArtifact => ({ globals: {}, }, fileMap: {}, + storageLayout: {}, + notes: {}, }); export const randomContractInstanceWithAddress = (opts: { contractClassId?: Fr } = {}): ContractInstanceWithAddress => diff --git a/yarn-project/circuits.js/src/contract/artifact_hash.test.ts b/yarn-project/circuits.js/src/contract/artifact_hash.test.ts index 2953da240c1..2d17740c9c2 100644 --- a/yarn-project/circuits.js/src/contract/artifact_hash.test.ts +++ b/yarn-project/circuits.js/src/contract/artifact_hash.test.ts @@ -12,6 +12,8 @@ describe('ArtifactHash', () => { globals: {}, structs: {}, }, + storageLayout: {}, + notes: {}, }; expect(computeArtifactHash(emptyArtifact).toString()).toMatchInlineSnapshot( `"0x0dea64e7fa0688017f77bcb7075485485afb4a5f1f8508483398869439f82fdf"`, diff --git a/yarn-project/end-to-end/src/e2e_account_init_fees.test.ts b/yarn-project/end-to-end/src/e2e_account_init_fees.test.ts index 75c225f4d52..2f488cca319 100644 --- a/yarn-project/end-to-end/src/e2e_account_init_fees.test.ts +++ b/yarn-project/end-to-end/src/e2e_account_init_fees.test.ts @@ -333,8 +333,8 @@ describe('e2e_fees_account_init', () => { }); async function addTransparentNoteToPxe(owner: AztecAddress, amount: bigint, secretHash: Fr, txHash: TxHash) { - const storageSlot = new Fr(5); // The storage slot of `pending_shields` is 5. 
- const noteTypeId = new Fr(84114971101151129711410111011678111116101n); // TransparentNote + const storageSlot = bananaCoin.artifact.storageLayout['pending_shields'].slot; + const noteTypeId = bananaCoin.artifact.notes['TransparentNote'].id; const note = new Note([new Fr(amount), secretHash]); // this note isn't encrypted but we need to provide a registered public key diff --git a/yarn-project/end-to-end/src/sample-dapp/index.mjs b/yarn-project/end-to-end/src/sample-dapp/index.mjs index 6f421f61f3a..f575fe89569 100644 --- a/yarn-project/end-to-end/src/sample-dapp/index.mjs +++ b/yarn-project/end-to-end/src/sample-dapp/index.mjs @@ -37,8 +37,8 @@ async function mintPrivateFunds(pxe) { const secretHash = await computeSecretHash(secret); const receipt = await token.methods.mint_private(mintAmount, secretHash).send().wait(); - const storageSlot = new Fr(5); - const noteTypeId = new Fr(84114971101151129711410111011678111116101n); // TransparentNote + const storageSlot = token.artifact.storageLayout['pending_shields'].slot; + const noteTypeId = token.artifact.notes['TransparentNote'].id; const note = new Note([new Fr(mintAmount), secretHash]); const extendedNote = new ExtendedNote( diff --git a/yarn-project/end-to-end/src/sample-dapp/index.test.mjs b/yarn-project/end-to-end/src/sample-dapp/index.test.mjs index 9508ab1631b..49a6156046d 100644 --- a/yarn-project/end-to-end/src/sample-dapp/index.test.mjs +++ b/yarn-project/end-to-end/src/sample-dapp/index.test.mjs @@ -22,8 +22,8 @@ describe('token', () => { const secretHash = await computeSecretHash(secret); const receipt = await token.methods.mint_private(initialBalance, secretHash).send().wait(); - const storageSlot = new Fr(5); - const noteTypeId = new Fr(84114971101151129711410111011678111116101n); // TransparentNote + const storageSlot = token.artifact.storageLayout['pending_shields'].slot; + const noteTypeId = token.artifact.notes['TransparentNote'].id; const note = new Note([new Fr(initialBalance), secretHash]); const extendedNote = new ExtendedNote( note, diff --git a/yarn-project/end-to-end/src/shared/browser.ts b/yarn-project/end-to-end/src/shared/browser.ts index 370a7698337..9a94d89db75 100644 --- a/yarn-project/end-to-end/src/shared/browser.ts +++ b/yarn-project/end-to-end/src/shared/browser.ts @@ -227,11 +227,11 @@ export const browserTestSuite = ( INITIAL_TEST_SIGNING_KEYS, INITIAL_TEST_ACCOUNT_SALTS, Buffer, + contractArtifactFromBuffer, } = window.AztecJs; // We serialize the artifact since buffers (used for bytecode) do not cross well from one realm to another - const TokenContractArtifact = JSON.parse( - Buffer.from(serializedTokenContractArtifact, 'base64').toString('utf-8'), - (key, value) => (key === 'bytecode' && typeof value === 'string' ? 
Buffer.from(value, 'base64') : value), + const TokenContractArtifact = contractArtifactFromBuffer( + Buffer.from(serializedTokenContractArtifact, 'base64'), ); const pxe = createPXEClient(rpcUrl!); @@ -264,9 +264,9 @@ export const browserTestSuite = ( const secretHash = computeSecretHash(secret); const mintPrivateReceipt = await token.methods.mint_private(initialBalance, secretHash).send().wait(); - const storageSlot = new Fr(5); + const storageSlot = token.artifact.storageLayout['pending_shields'].slot; - const noteTypeId = new Fr(84114971101151129711410111011678111116101n); + const noteTypeId = token.artifact.notes['TransparentNote'].id; const note = new Note([new Fr(initialBalance), secretHash]); const extendedNote = new ExtendedNote( note, diff --git a/yarn-project/foundation/src/abi/abi.ts b/yarn-project/foundation/src/abi/abi.ts index 9f2d48800f8..3dbe2dc7036 100644 --- a/yarn-project/foundation/src/abi/abi.ts +++ b/yarn-project/foundation/src/abi/abi.ts @@ -1,5 +1,6 @@ import { inflate } from 'pako'; +import { type Fr } from '../fields/fields.js'; import { type FunctionSelector } from './function_selector.js'; /** @@ -267,6 +268,34 @@ export type DebugFileMap = Record< } >; +/** + * Type representing a note in use in the contract. + */ +export type ContractNote = { + /** + * Note identifier + */ + id: Fr; + /** + * Type of the note (e.g., 'TransparentNote') + */ + typ: string; +}; + +/** + * Type representing a field layout in the storage of a contract. + */ +export type FieldLayout = { + /** + * Slot in which the field is stored. + */ + slot: Fr; + /** + * Type being stored at the slot (e.g., 'Map>') + */ + typ: string; +}; + /** * Defines artifact of a contract. */ @@ -292,6 +321,14 @@ export interface ContractArtifact { structs: Record; globals: Record; }; + /** + * Storage layout + */ + storageLayout: Record; + /** + * The notes used in the contract. + */ + notes: Record; /** * The map of file ID to the source code and path of the file. 
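A minimal usage sketch of the two new artifact fields, with the
`pending_shields` storage field and `TransparentNote` note type that the
updated tests below read; `artifact` stands for any compiled contract
artifact:

    const storageSlot = artifact.storageLayout['pending_shields'].slot; // Fr slot of the field
    const noteTypeId = artifact.notes['TransparentNote'].id; // Fr note type id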
diff --git a/yarn-project/p2p/src/service/discv5_service.test.ts b/yarn-project/p2p/src/service/discv5_service.test.ts index 9228e1b87d0..4ce6a233075 100644 --- a/yarn-project/p2p/src/service/discv5_service.test.ts +++ b/yarn-project/p2p/src/service/discv5_service.test.ts @@ -1,3 +1,4 @@ +import { jest } from '@jest/globals'; import type { PeerId } from '@libp2p/interface'; import { BootstrapNode } from '../bootstrap/bootstrap.js'; @@ -21,6 +22,8 @@ const waitForPeers = (node: DiscV5Service, expectedCount: number): Promise }; describe('Discv5Service', () => { + jest.setTimeout(10_000); + let bootNode: BootstrapNode; let bootNodePeerId: PeerId; let port = 1234; diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index ecff905aea0..b34939ce8ee 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ -340,10 +340,13 @@ describe('Private Execution test suite', () => { const amountToTransfer = 100n; const artifact = getFunctionArtifact(StatefulTestContractArtifact, 'destroy_and_create_no_init_check'); - const storageSlot = computeSlotForMapping(new Fr(1n), owner); - const recipientStorageSlot = computeSlotForMapping(new Fr(1n), recipient); + const storageSlot = computeSlotForMapping(StatefulTestContractArtifact.storageLayout['notes'].slot, owner); + const recipientStorageSlot = computeSlotForMapping( + StatefulTestContractArtifact.storageLayout['notes'].slot, + recipient, + ); - const noteTypeId = new Fr(869710811710178111116101n); // ValueNote + const noteTypeId = StatefulTestContractArtifact.notes['ValueNote'].id; const notes = [buildNote(60n, owner, storageSlot, noteTypeId), buildNote(80n, owner, storageSlot, noteTypeId)]; oracle.getNotes.mockResolvedValue(notes); @@ -398,7 +401,7 @@ describe('Private Execution test suite', () => { const artifact = getFunctionArtifact(StatefulTestContractArtifact, 'destroy_and_create_no_init_check'); const storageSlot = computeSlotForMapping(new Fr(1n), owner); - const noteTypeId = new Fr(869710811710178111116101n); // ValueNote + const noteTypeId = StatefulTestContractArtifact.notes['ValueNote'].id; const notes = [buildNote(balance, owner, storageSlot, noteTypeId)]; oracle.getNotes.mockResolvedValue(notes); @@ -729,7 +732,8 @@ describe('Private Execution test suite', () => { const secret = new Fr(1n); const secretHash = computeSecretHash(secret); const note = new Note([secretHash]); - const storageSlot = new Fr(5); + // @todo @LHerskind (#6001) Need to investigate why this was working with `new Fr(5)` as the `example_set = 2` should have caused a failure. 
+ const storageSlot = TestContractArtifact.storageLayout['example_set'].slot; oracle.getNotes.mockResolvedValue([ { contractAddress, @@ -861,8 +865,11 @@ describe('Private Execution test suite', () => { expect(newNoteHashes).toHaveLength(1); const noteHash = newNoteHashes[0]; - const storageSlot = computeSlotForMapping(new Fr(1n), owner); - const noteTypeId = new Fr(869710811710178111116101n); // ValueNote + const storageSlot = computeSlotForMapping( + PendingNoteHashesContractArtifact.storageLayout['balances'].slot, + owner, + ); + const noteTypeId = PendingNoteHashesContractArtifact.notes['ValueNote'].id; const innerNoteHash = await acirSimulator.computeInnerNoteHash( contractAddress, @@ -917,8 +924,11 @@ describe('Private Execution test suite', () => { const execInsert = result.nestedExecutions[0]; const execGetThenNullify = result.nestedExecutions[1]; - const storageSlot = computeSlotForMapping(new Fr(1n), owner); - const noteTypeId = new Fr(869710811710178111116101n); // ValueNote + const storageSlot = computeSlotForMapping( + PendingNoteHashesContractArtifact.storageLayout['balances'].slot, + owner, + ); + const noteTypeId = PendingNoteHashesContractArtifact.notes['ValueNote'].id; expect(execInsert.newNotes).toHaveLength(1); const noteAndSlot = execInsert.newNotes[0]; diff --git a/yarn-project/simulator/src/client/simulator.test.ts b/yarn-project/simulator/src/client/simulator.test.ts index 62bb0c13b07..ef9fd366291 100644 --- a/yarn-project/simulator/src/client/simulator.test.ts +++ b/yarn-project/simulator/src/client/simulator.test.ts @@ -59,8 +59,8 @@ describe('Simulator', () => { describe('computeNoteHashAndNullifier', () => { const artifact = getFunctionArtifact(TokenContractArtifact, 'compute_note_hash_and_nullifier'); const nonce = Fr.random(); - const storageSlot = Fr.random(); - const noteTypeId = new Fr(8411110710111078111116101n); // TODO(#5833): This can be imported from artifact now + const storageSlot = TokenContractArtifact.storageLayout['balances'].slot; + const noteTypeId = TokenContractArtifact.notes['TokenNote'].id; const createNote = (amount = 123n) => new Note([new Fr(amount), owner.toField(), Fr.random()]); diff --git a/yarn-project/simulator/src/public/index.test.ts b/yarn-project/simulator/src/public/index.test.ts index 886ebf355fb..8e8fb651afe 100644 --- a/yarn-project/simulator/src/public/index.test.ts +++ b/yarn-project/simulator/src/public/index.test.ts @@ -335,7 +335,7 @@ describe('ACIR public execution simulator', () => { expect(result.newNoteHashes.length).toEqual(1); const expectedNoteHash = computeNoteContentHash([amount, secretHash]); - const storageSlot = new Fr(5); // for pending_shields + const storageSlot = TokenContractArtifact.storageLayout['pending_shields'].slot; const expectedInnerNoteHash = computeInnerNoteHash(storageSlot, expectedNoteHash); expect(result.newNoteHashes[0].value).toEqual(expectedInnerNoteHash); }); diff --git a/yarn-project/types/src/abi/contract_artifact.test.ts b/yarn-project/types/src/abi/contract_artifact.test.ts index de830666e09..1e1bb30e6e7 100644 --- a/yarn-project/types/src/abi/contract_artifact.test.ts +++ b/yarn-project/types/src/abi/contract_artifact.test.ts @@ -4,7 +4,6 @@ import { contractArtifactFromBuffer, contractArtifactToBuffer } from './contract describe('contract_artifact', () => { it('serializes and deserializes an instance', () => { const artifact = getSampleContractArtifact(); - delete artifact.aztecNrVersion; const serialized = contractArtifactToBuffer(artifact); const deserialized = 
contractArtifactFromBuffer(serialized); expect(deserialized).toEqual(artifact); diff --git a/yarn-project/types/src/abi/contract_artifact.ts b/yarn-project/types/src/abi/contract_artifact.ts index f2c605a0886..0998899167e 100644 --- a/yarn-project/types/src/abi/contract_artifact.ts +++ b/yarn-project/types/src/abi/contract_artifact.ts @@ -2,10 +2,17 @@ import { type ABIParameter, type ABIParameterVisibility, type AbiType, + type BasicValue, type ContractArtifact, + type ContractNote, + type FieldLayout, type FunctionArtifact, FunctionType, + type IntegerValue, + type StructValue, + type TypedStructFieldValue, } from '@aztec/foundation/abi'; +import { Fr } from '@aztec/foundation/fields'; import { AZTEC_INITIALIZER_ATTRIBUTE, @@ -50,6 +57,9 @@ export function contractArtifactFromBuffer(buffer: Buffer): ContractArtifact { if (key === 'bytecode' && typeof value === 'string') { return Buffer.from(value, 'base64'); } + if (typeof value === 'object' && value !== null && value.type === 'Fr') { + return new Fr(BigInt(value.value)); + } return value; }); } @@ -198,6 +208,59 @@ function hasKernelFunctionInputs(params: ABIParameter[]): boolean { return firstParam?.type.kind === 'struct' && firstParam.type.path.includes('ContextInputs'); } +/** + * Generates a storage layout for the contract artifact. + * @param input - The compiled noir contract to get storage layout for + * @returns A storage layout for the contract. + */ +function getStorageLayout(input: NoirCompiledContract) { + const storage = input.outputs.globals.storage ? (input.outputs.globals.storage[0] as StructValue) : { fields: [] }; + const storageFields = storage.fields as TypedStructFieldValue[]; + + if (!storageFields) { + return {}; + } + + return storageFields.reduce((acc: Record, field) => { + const name = field.name; + const slot = field.value.fields[0].value as IntegerValue; + const typ = field.value.fields[1].value as BasicValue<'string', string>; + acc[name] = { + slot: new Fr(BigInt(slot.value)), + typ: typ.value, + }; + return acc; + }, {}); +} + +/** + * Generates records of the notes with note type ids of the artifact. + * @param input - The compiled noir contract to get note types for + * @return A record of the note types and their ids + */ +function getNoteTypes(input: NoirCompiledContract) { + type t = { + kind: string; + fields: [{ kind: string; sign: boolean; value: string }, { kind: string; value: string }]; + }; + + const notes = input.outputs.globals.notes as t[]; + + if (!notes) { + return {}; + } + + return notes.reduce((acc: Record, note) => { + const name = note.fields[1].value as string; + const id = new Fr(BigInt(note.fields[0].value)); + acc[name] = { + id, + typ: name, + }; + return acc; + }, {}); +} + /** * Given a Nargo output generates an Aztec-compatible contract artifact. * @param compiled - Noir build output. 
@@ -208,6 +271,8 @@ function generateContractArtifact(contract: NoirCompiledContract, aztecNrVersion name: contract.name, functions: contract.functions.map(f => generateFunctionArtifact(f, contract)), outputs: contract.outputs, + storageLayout: getStorageLayout(contract), + notes: getNoteTypes(contract), fileMap: contract.file_map, aztecNrVersion, }; diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 12ef01aca7c..95c28c7b512 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -252,7 +252,6 @@ __metadata: "@types/jest": ^29.5.0 "@types/lodash.camelcase": ^4.3.7 "@types/lodash.capitalize": ^4.2.7 - "@types/lodash.uniqby": ^4.7.9 "@types/node": ^18.7.23 "@types/pako": ^2.0.0 "@types/semver": ^7.5.4 @@ -262,7 +261,6 @@ __metadata: jest: ^29.5.0 lodash.camelcase: ^4.3.0 lodash.capitalize: ^4.2.1 - lodash.uniqby: ^4.7.0 memfs: ^4.6.0 pako: ^2.1.0 semver: ^7.5.4 @@ -3710,15 +3708,6 @@ __metadata: languageName: node linkType: hard -"@types/lodash.uniqby@npm:^4.7.9": - version: 4.7.9 - resolution: "@types/lodash.uniqby@npm:4.7.9" - dependencies: - "@types/lodash": "*" - checksum: 24cc8af36e0d4c52b7294c7ba7d814c89ce2c8118d94350bbed21031fef850fa1a280bfd2b30a47e0b5f7aa6ac649a36a5089aa76bc23787963a5ee6443f631e - languageName: node - linkType: hard - "@types/lodash@npm:*": version: 4.17.0 resolution: "@types/lodash@npm:4.17.0" @@ -9867,13 +9856,6 @@ __metadata: languageName: node linkType: hard -"lodash.uniqby@npm:^4.7.0": - version: 4.7.0 - resolution: "lodash.uniqby@npm:4.7.0" - checksum: 659264545a95726d1493123345aad8cbf56e17810fa9a0b029852c6d42bc80517696af09d99b23bef1845d10d95e01b8b4a1da578f22aeba7a30d3e0022a4938 - languageName: node - linkType: hard - "lodash@npm:^4.17.21": version: 4.17.21 resolution: "lodash@npm:4.17.21" From f497c26b03c6b5f1fc68e0e35eeeb1c20ede7029 Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 24 Apr 2024 11:43:27 -0500 Subject: [PATCH 007/201] hotfix: better docker prune --- .github/workflows/setup-runner.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/setup-runner.yml b/.github/workflows/setup-runner.yml index b3685eeea80..abee0767bfe 100644 --- a/.github/workflows/setup-runner.yml +++ b/.github/workflows/setup-runner.yml @@ -105,7 +105,7 @@ jobs: echo '{"default-address-pools":[{"base":"172.17.0.0/12","size":20}, {"base":"10.99.0.0/12","size":20}, {"base":"192.168.0.0/16","size":24}]}' > /etc/docker/daemon.json sudo service docker restart # helps to not overuse space - docker system prune -f || true + docker system prune -f -a || true echo "Configured docker daemon for making many networks." 
# Run maybe_exit_spot.sh every minute cp scripts/ci/spot_runner_graceful_exit.sh /run/spot_runner_graceful_exit.sh From 8e0a56306ba45ea1eaaa25ee47d84b7334e0bbe3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Wed, 24 Apr 2024 20:24:01 +0200 Subject: [PATCH 008/201] feat: AES oracle (#5996) Fixes #5895 --- noir-projects/aztec-nr/aztec/src/oracle.nr | 1 + .../aztec-nr/aztec/src/oracle/encryption.nr | 7 ++++ .../contracts/test_contract/src/main.nr | 8 +++- .../end-to-end/src/e2e_encryption.test.ts | 41 +++++++++++++++++++ .../simulator/src/acvm/oracle/oracle.ts | 13 ++++++ .../simulator/src/acvm/oracle/typed_oracle.ts | 4 ++ .../src/client/client_execution_context.ts | 7 +++- 7 files changed, 79 insertions(+), 2 deletions(-) create mode 100644 noir-projects/aztec-nr/aztec/src/oracle/encryption.nr create mode 100644 yarn-project/end-to-end/src/e2e_encryption.test.ts diff --git a/noir-projects/aztec-nr/aztec/src/oracle.nr b/noir-projects/aztec-nr/aztec/src/oracle.nr index 57415dc9575..753ef6e930a 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle.nr @@ -4,6 +4,7 @@ mod arguments; mod call_private_function; +mod encryption; mod get_contract_instance; mod get_l1_to_l2_membership_witness; mod get_nullifier_membership_witness; diff --git a/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr b/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr new file mode 100644 index 00000000000..cb655c756ce --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr @@ -0,0 +1,7 @@ + +#[oracle(aes128Encrypt)] +pub fn aes128_encrypt_oracle(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8; N] {} + +unconstrained pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8; N] { + aes128_encrypt_oracle(input, iv, key) +} diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index f7a40cf41a8..107df25dc72 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -25,7 +25,7 @@ contract Test { note_getter_options::NoteStatus }, deploy::deploy_contract as aztec_deploy_contract, - oracle::{get_public_key::get_public_key as get_public_key_oracle, unsafe_rand::unsafe_rand} + oracle::{encryption::aes128_encrypt, get_public_key::get_public_key as get_public_key_oracle, unsafe_rand::unsafe_rand} }; use dep::token_portal_content_hash_lib::{get_mint_private_content_hash, get_mint_public_content_hash}; use dep::value_note::value_note::ValueNote; @@ -309,6 +309,12 @@ contract Test { assert(context.version() == version, "Invalid version"); } + #[aztec(private)] + fn encrypt(input: [u8; 64], iv: [u8; 16], key: [u8; 16]) { + let result = aes128_encrypt(input, iv, key); + context.emit_unencrypted_log(result); + } + #[aztec(public)] fn assert_public_global_vars( chain_id: Field, diff --git a/yarn-project/end-to-end/src/e2e_encryption.test.ts b/yarn-project/end-to-end/src/e2e_encryption.test.ts new file mode 100644 index 00000000000..861a7c573c3 --- /dev/null +++ b/yarn-project/end-to-end/src/e2e_encryption.test.ts @@ -0,0 +1,41 @@ +import { type Wallet } from '@aztec/aztec.js'; +import { Aes128 } from '@aztec/circuits.js/barretenberg'; +import { TestContract } from '@aztec/noir-contracts.js'; + +import { randomBytes } from 'crypto'; + +import { setup } from './fixtures/utils.js'; + +describe('e2e_encryption', () => { + const aes128 = new Aes128(); + 
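+  // `aes128` is the local reference implementation: the test encrypts the same
+  // input with AES-128-CBC off-chain and compares the result with the ciphertext
+  // the contract emits as an unencrypted log.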
+ let wallet: Wallet; + let teardown: () => Promise; + + let contract: TestContract; + + beforeAll(async () => { + ({ teardown, wallet } = await setup()); + contract = await TestContract.deploy(wallet).send().deployed(); + }, 25_000); + + afterAll(() => teardown()); + + it('encrypts', async () => { + const input = randomBytes(64); + const iv = randomBytes(16); + const key = randomBytes(16); + + const expectedCiphertext = aes128.encryptBufferCBC(input, iv, key); + + const logs = await contract.methods + .encrypt(Array.from(input), Array.from(iv), Array.from(key)) + .send() + .getUnencryptedLogs(); + // Each byte of encrypted data is in its own field and it's all serialized into a long buffer so we simply extract + // each 32nd byte from the buffer to get the encrypted data + const recoveredCiphertext = logs.logs[0].log.data.filter((_, i) => (i + 1) % 32 === 0); + + expect(recoveredCiphertext).toEqual(expectedCiphertext); + }); +}); diff --git a/yarn-project/simulator/src/acvm/oracle/oracle.ts b/yarn-project/simulator/src/acvm/oracle/oracle.ts index ac88228ef08..f112db7d65a 100644 --- a/yarn-project/simulator/src/acvm/oracle/oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/oracle.ts @@ -378,4 +378,17 @@ export class Oracle { ); return toAcvmEnqueuePublicFunctionResult(enqueuedRequest); } + + aes128Encrypt(input: ACVMField[], initializationVector: ACVMField[], key: ACVMField[]): ACVMField[] { + // Convert each field to a number and then to a buffer (1 byte is stored in 1 field) + const processedInput = Buffer.from(input.map(fromACVMField).map(f => f.toNumber())); + const processedIV = Buffer.from(initializationVector.map(fromACVMField).map(f => f.toNumber())); + const processedKey = Buffer.from(key.map(fromACVMField).map(f => f.toNumber())); + + // Encrypt the input + const ciphertext = this.typedOracle.aes128Encrypt(processedInput, processedIV, processedKey); + + // Convert each byte of ciphertext to a field and return it + return Array.from(ciphertext).map(byte => toACVMField(byte)); + } } diff --git a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts index 70f57233af1..0771458a972 100644 --- a/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts +++ b/yarn-project/simulator/src/acvm/oracle/typed_oracle.ts @@ -233,4 +233,8 @@ export abstract class TypedOracle { ): Promise { throw new OracleMethodNotAvailableError('enqueuePublicFunctionCall'); } + + aes128Encrypt(_input: Buffer, _initializationVector: Buffer, _key: Buffer): Buffer { + throw new OracleMethodNotAvailableError('encrypt'); + } } diff --git a/yarn-project/simulator/src/client/client_execution_context.ts b/yarn-project/simulator/src/client/client_execution_context.ts index 0ec755281cc..0bcb1f07dbb 100644 --- a/yarn-project/simulator/src/client/client_execution_context.ts +++ b/yarn-project/simulator/src/client/client_execution_context.ts @@ -21,7 +21,7 @@ import { type SideEffect, type TxContext, } from '@aztec/circuits.js'; -import { type Grumpkin } from '@aztec/circuits.js/barretenberg'; +import { Aes128, type Grumpkin } from '@aztec/circuits.js/barretenberg'; import { computePublicDataTreeLeafSlot, computeUniqueNoteHash, siloNoteHash } from '@aztec/circuits.js/hash'; import { type FunctionAbi, type FunctionArtifact, countArgumentsSize } from '@aztec/foundation/abi'; import { type AztecAddress } from '@aztec/foundation/aztec-address'; @@ -522,4 +522,9 @@ export class ClientExecutionContext extends ViewDataOracle { } return values; } + + public override 
aes128Encrypt(input: Buffer, initializationVector: Buffer, key: Buffer): Buffer { + const aes128 = new Aes128(); + return aes128.encryptBufferCBC(input, initializationVector, key); + } } From 3b9179118369137880277f1444f0e3f94b3f5e79 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Wed, 24 Apr 2024 19:28:13 +0100 Subject: [PATCH 009/201] feat: Sync from noir (#6007) Automated pull of development from the [noir](https://github.com/noir-lang/noir) programming language, a dependency of Aztec. BEGIN_COMMIT_OVERRIDE fix: Nested array equality (https://github.com/noir-lang/noir/pull/4903) feat: Handle `BrilligCall` opcodes in the debugger (https://github.com/noir-lang/noir/pull/4897) chore: Release Noir(0.28.0) (https://github.com/noir-lang/noir/pull/4776) feat: Sync from aztec-packages (https://github.com/noir-lang/noir/pull/4902) END_COMMIT_OVERRIDE --------- Co-authored-by: Tom French Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- .noir-sync-commit | 2 +- avm-transpiler/Cargo.lock | 32 +- .../actions/install-playwright/action.yml | 22 +- noir/noir-repo/.release-please-manifest.json | 4 +- noir/noir-repo/CHANGELOG.md | 83 +++ noir/noir-repo/Cargo.lock | 54 +- noir/noir-repo/Cargo.toml | 16 +- noir/noir-repo/acvm-repo/CHANGELOG.md | 119 ++++ noir/noir-repo/acvm-repo/acir/Cargo.toml | 2 +- .../noir-repo/acvm-repo/acir_field/Cargo.toml | 2 +- noir/noir-repo/acvm-repo/acvm/Cargo.toml | 2 +- .../acvm-repo/acvm/src/pwg/brillig.rs | 13 - noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs | 23 +- noir/noir-repo/acvm-repo/acvm_js/Cargo.toml | 2 +- noir/noir-repo/acvm-repo/acvm_js/package.json | 2 +- .../acvm-repo/blackbox_solver/Cargo.toml | 2 +- .../bn254_blackbox_solver/Cargo.toml | 2 +- noir/noir-repo/acvm-repo/brillig/Cargo.toml | 2 +- .../noir-repo/acvm-repo/brillig_vm/Cargo.toml | 2 +- .../src/ssa/ssa_gen/context.rs | 108 +--- .../noirc_frontend/src/hir/type_check/expr.rs | 30 - noir/noir-repo/compiler/wasm/package.json | 2 +- .../explainers/explainer-oracle.md | 57 ++ .../explainers/explainer-recursion.md | 176 ++++++ .../getting_started/_category_.json | 5 + .../hello_noir/_category_.json | 5 + .../getting_started/hello_noir/index.md | 142 +++++ .../hello_noir/project_breakdown.md | 199 ++++++ .../installation/_category_.json | 6 + .../getting_started/installation/index.md | 48 ++ .../installation/other_install_methods.md | 102 ++++ .../getting_started/tooling/noir_codegen.md | 113 ++++ .../version-v0.28.0/how_to/_category_.json | 5 + .../how_to/debugger/_category_.json | 6 + .../debugger/debugging_with_the_repl.md | 164 +++++ .../how_to/debugger/debugging_with_vs_code.md | 68 +++ .../version-v0.28.0/how_to/how-to-oracles.md | 276 +++++++++ .../how_to/how-to-recursion.md | 179 ++++++ .../how_to/how-to-solidity-verifier.md | 231 +++++++ .../version-v0.28.0/how_to/merkle-proof.mdx | 49 ++ .../how_to/using-devcontainers.mdx | 110 ++++ .../versioned_docs/version-v0.28.0/index.mdx | 67 ++ .../version-v0.28.0/migration_notes.md | 105 ++++ .../noir/concepts/_category_.json | 6 + .../version-v0.28.0/noir/concepts/assert.md | 45 ++ .../version-v0.28.0/noir/concepts/comments.md | 33 + .../noir/concepts/control_flow.md | 77 +++ .../version-v0.28.0/noir/concepts/data_bus.md | 21 + .../noir/concepts/data_types/_category_.json | 5 + .../noir/concepts/data_types/arrays.md | 251 ++++++++ .../noir/concepts/data_types/booleans.md | 31 + .../noir/concepts/data_types/fields.md | 192 ++++++ .../concepts/data_types/function_types.md | 26 + 
.../noir/concepts/data_types/index.md | 110 ++++ .../noir/concepts/data_types/integers.md | 155 +++++ .../noir/concepts/data_types/references.md | 23 + .../noir/concepts/data_types/slices.mdx | 195 ++++++ .../noir/concepts/data_types/strings.md | 80 +++ .../noir/concepts/data_types/structs.md | 70 +++ .../noir/concepts/data_types/tuples.md | 48 ++ .../version-v0.28.0/noir/concepts/distinct.md | 64 ++ .../noir/concepts/functions.md | 226 +++++++ .../version-v0.28.0/noir/concepts/generics.md | 106 ++++ .../version-v0.28.0/noir/concepts/globals.md | 72 +++ .../version-v0.28.0/noir/concepts/lambdas.md | 81 +++ .../noir/concepts/mutability.md | 121 ++++ .../version-v0.28.0/noir/concepts/ops.md | 98 +++ .../version-v0.28.0/noir/concepts/oracles.md | 31 + .../noir/concepts/shadowing.md | 44 ++ .../version-v0.28.0/noir/concepts/traits.md | 389 ++++++++++++ .../noir/concepts/unconstrained.md | 99 +++ .../modules_packages_crates/_category_.json | 6 + .../crates_and_packages.md | 43 ++ .../modules_packages_crates/dependencies.md | 124 ++++ .../noir/modules_packages_crates/modules.md | 105 ++++ .../modules_packages_crates/workspaces.md | 42 ++ .../noir/standard_library/_category_.json | 6 + .../noir/standard_library/bigint.md | 122 ++++ .../noir/standard_library/black_box_fns.md | 31 + .../noir/standard_library/bn254.md | 46 ++ .../standard_library/containers/boundedvec.md | 326 ++++++++++ .../standard_library/containers/hashmap.md | 570 ++++++++++++++++++ .../noir/standard_library/containers/index.md | 5 + .../noir/standard_library/containers/vec.mdx | 151 +++++ .../cryptographic_primitives/_category_.json | 5 + .../cryptographic_primitives/ec_primitives.md | 102 ++++ .../ecdsa_sig_verification.mdx | 98 +++ .../cryptographic_primitives/eddsa.mdx | 37 ++ .../cryptographic_primitives/hashes.mdx | 250 ++++++++ .../cryptographic_primitives/index.md | 14 + .../cryptographic_primitives/scalar.mdx | 33 + .../cryptographic_primitives/schnorr.mdx | 64 ++ .../noir/standard_library/logging.md | 78 +++ .../noir/standard_library/merkle_trees.md | 58 ++ .../noir/standard_library/options.md | 101 ++++ .../noir/standard_library/recursion.md | 88 +++ .../noir/standard_library/traits.md | 410 +++++++++++++ .../noir/standard_library/zeroed.md | 26 + .../NoirJS/backend_barretenberg/.nojekyll | 1 + .../classes/BarretenbergBackend.md | 160 +++++ .../classes/BarretenbergVerifier.md | 58 ++ .../NoirJS/backend_barretenberg/index.md | 59 ++ .../type-aliases/BackendOptions.md | 21 + .../backend_barretenberg/typedoc-sidebar.cjs | 4 + .../reference/NoirJS/noir_js/.nojekyll | 1 + .../reference/NoirJS/noir_js/classes/Noir.md | 132 ++++ .../reference/NoirJS/noir_js/functions/and.md | 22 + .../NoirJS/noir_js/functions/blake2s256.md | 21 + .../functions/ecdsa_secp256k1_verify.md | 28 + .../functions/ecdsa_secp256r1_verify.md | 28 + .../NoirJS/noir_js/functions/keccak256.md | 21 + .../NoirJS/noir_js/functions/sha256.md | 21 + .../reference/NoirJS/noir_js/functions/xor.md | 22 + .../reference/NoirJS/noir_js/index.md | 54 ++ .../type-aliases/ForeignCallHandler.md | 24 + .../noir_js/type-aliases/ForeignCallInput.md | 9 + .../noir_js/type-aliases/ForeignCallOutput.md | 9 + .../NoirJS/noir_js/type-aliases/WitnessMap.md | 9 + .../NoirJS/noir_js/typedoc-sidebar.cjs | 4 + .../reference/NoirJS/noir_wasm/.nojekyll | 1 + .../NoirJS/noir_wasm/functions/compile.md | 51 ++ .../noir_wasm/functions/compile_contract.md | 51 ++ .../noir_wasm/functions/createFileManager.md | 21 + .../functions/inflateDebugSymbols.md | 21 + 
.../reference/NoirJS/noir_wasm/index.md | 49 ++ .../NoirJS/noir_wasm/typedoc-sidebar.cjs | 4 + .../version-v0.28.0/reference/_category_.json | 5 + .../reference/debugger/_category_.json | 6 + .../debugger/debugger_known_limitations.md | 59 ++ .../reference/debugger/debugger_repl.md | 360 +++++++++++ .../reference/debugger/debugger_vscode.md | 82 +++ .../reference/nargo_commands.md | 381 ++++++++++++ .../version-v0.28.0/tooling/debugger.md | 27 + .../tooling/language_server.md | 43 ++ .../version-v0.28.0/tooling/testing.md | 62 ++ .../version-v0.28.0/tutorials/noirjs_app.md | 326 ++++++++++ .../version-v0.28.0-sidebars.json | 93 +++ .../regression_4383/Nargo.toml | 7 + .../regression_4383/src/main.nr | 3 + .../noir-repo/tooling/debugger/src/context.rs | 169 +++--- noir/noir-repo/tooling/debugger/src/repl.rs | 61 +- .../tooling/noir_codegen/package.json | 2 +- noir/noir-repo/tooling/noir_js/package.json | 2 +- .../tooling/noir_js_types/package.json | 2 +- .../tooling/noirc_abi_wasm/package.json | 2 +- yarn-project/yarn.lock | 2 +- 146 files changed, 10529 insertions(+), 350 deletions(-) create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/explainers/explainer-oracle.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/explainers/explainer-recursion.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/index.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/project_breakdown.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/index.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/other_install_methods.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/tooling/noir_codegen.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/debugging_with_the_repl.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/debugging_with_vs_code.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-oracles.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-recursion.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-solidity-verifier.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/merkle-proof.mdx create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/using-devcontainers.mdx create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/index.mdx create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/migration_notes.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/assert.md create mode 100644 
noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/comments.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/control_flow.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_bus.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/arrays.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/booleans.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/fields.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/function_types.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/index.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/integers.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/references.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/slices.mdx create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/strings.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/structs.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/tuples.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/distinct.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/functions.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/generics.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/globals.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/lambdas.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/mutability.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/ops.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/oracles.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/shadowing.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/traits.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/unconstrained.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/crates_and_packages.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/dependencies.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/modules.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/workspaces.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/bigint.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/black_box_fns.md create mode 100644 
noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/bn254.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/boundedvec.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/hashmap.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/index.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/vec.mdx create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/ec_primitives.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/eddsa.mdx create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/hashes.mdx create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/index.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/scalar.mdx create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/schnorr.mdx create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/logging.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/merkle_trees.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/options.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/recursion.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/traits.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/zeroed.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/.nojekyll create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/index.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/.nojekyll create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/classes/Noir.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/and.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/blake2s256.md create mode 100644 
noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/keccak256.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/sha256.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/xor.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/index.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/.nojekyll create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/compile.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/compile_contract.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/createFileManager.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/index.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/_category_.json create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_known_limitations.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_repl.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_vscode.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/nargo_commands.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/debugger.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/language_server.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/testing.md create mode 100644 noir/noir-repo/docs/versioned_docs/version-v0.28.0/tutorials/noirjs_app.md create mode 100644 noir/noir-repo/docs/versioned_sidebars/version-v0.28.0-sidebars.json create mode 100644 noir/noir-repo/test_programs/execution_success/regression_4383/Nargo.toml create mode 100644 noir/noir-repo/test_programs/execution_success/regression_4383/src/main.nr diff --git a/.noir-sync-commit b/.noir-sync-commit index 5aba57a74d0..19e9a0d3012 100644 --- a/.noir-sync-commit +++ 
b/.noir-sync-commit @@ -1 +1 @@ -5985e4285de9e29f7c986103a49fdaec59228887 +0cf2e2a1b8d247bed03ba5b7b1be5cd30f0d51b2 diff --git a/avm-transpiler/Cargo.lock b/avm-transpiler/Cargo.lock index 21ff357893f..e145398e26e 100644 --- a/avm-transpiler/Cargo.lock +++ b/avm-transpiler/Cargo.lock @@ -4,7 +4,7 @@ version = 3 [[package]] name = "acir" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir_field", "base64 0.21.7", @@ -18,7 +18,7 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.43.0" +version = "0.44.0" dependencies = [ "ark-bn254", "ark-ff", @@ -31,7 +31,7 @@ dependencies = [ [[package]] name = "acvm" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -44,7 +44,7 @@ dependencies = [ [[package]] name = "acvm_blackbox_solver" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir", "blake2", @@ -161,7 +161,7 @@ dependencies = [ [[package]] name = "arena" -version = "0.27.0" +version = "0.28.0" [[package]] name = "ark-bn254" @@ -323,7 +323,7 @@ dependencies = [ [[package]] name = "aztec_macros" -version = "0.27.0" +version = "0.28.0" dependencies = [ "convert_case", "iter-extended", @@ -434,7 +434,7 @@ dependencies = [ [[package]] name = "brillig" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir_field", "serde", @@ -442,7 +442,7 @@ dependencies = [ [[package]] name = "brillig_vm" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -854,7 +854,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.27.0" +version = "0.28.0" dependencies = [ "codespan-reporting", "serde", @@ -1032,7 +1032,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.27.0" +version = "0.28.0" [[package]] name = "itertools" @@ -1197,7 +1197,7 @@ checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] name = "noirc_abi" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "iter-extended", @@ -1212,7 +1212,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "aztec_macros", @@ -1233,7 +1233,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "base64 0.21.7", @@ -1251,7 +1251,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "chrono", @@ -1268,7 +1268,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "arena", @@ -1293,7 +1293,7 @@ dependencies = [ [[package]] name = "noirc_printable_type" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "iter-extended", diff --git a/noir/noir-repo/.github/actions/install-playwright/action.yml b/noir/noir-repo/.github/actions/install-playwright/action.yml index a70c45d152a..0bd61b38c49 100644 --- a/noir/noir-repo/.github/actions/install-playwright/action.yml +++ b/noir/noir-repo/.github/actions/install-playwright/action.yml @@ -4,21 +4,21 @@ description: Installs Playwright and its dependencies and caches them. 
runs: using: composite steps: - - name: Query playwright version - shell: bash - run: echo "PLAYWRIGHT_VERSION=$(yarn workspace @noir-lang/noirc_abi info @web/test-runner-playwright --json | jq .children.Version | tr -d '"')" >> $GITHUB_ENV + # - name: Query playwright version + # shell: bash + # run: echo "PLAYWRIGHT_VERSION=$(yarn workspace @noir-lang/noirc_abi info @web/test-runner-playwright --json | jq .children.Version | tr -d '"')" >> $GITHUB_ENV - - name: Cache playwright binaries - uses: actions/cache@v4 - id: playwright-cache - with: - path: | - ~/.cache/ms-playwright - key: ${{ runner.os }}-playwright-${{ env.PLAYWRIGHT_VERSION }} + # - name: Cache playwright binaries + # uses: actions/cache@v4 + # id: playwright-cache + # with: + # path: | + # ~/.cache/ms-playwright + # key: ${{ runner.os }}-playwright-${{ env.PLAYWRIGHT_VERSION }} - name: Install playwright deps shell: bash - if: steps.playwright-cache.outputs.cache-hit != 'true' + # if: steps.playwright-cache.outputs.cache-hit != 'true' run: | # Workaround: https://github.com/microsoft/playwright/issues/30503#issuecomment-2074783821 sudo rm /etc/apt/sources.list.d/microsoft-prod.list diff --git a/noir/noir-repo/.release-please-manifest.json b/noir/noir-repo/.release-please-manifest.json index 7579928c999..e20b12e68ef 100644 --- a/noir/noir-repo/.release-please-manifest.json +++ b/noir/noir-repo/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "0.27.0", - "acvm-repo": "0.43.0" + ".": "0.28.0", + "acvm-repo": "0.44.0" } \ No newline at end of file diff --git a/noir/noir-repo/CHANGELOG.md b/noir/noir-repo/CHANGELOG.md index a084811fc21..148ecfac876 100644 --- a/noir/noir-repo/CHANGELOG.md +++ b/noir/noir-repo/CHANGELOG.md @@ -1,5 +1,88 @@ # Changelog +## [0.28.0](https://github.com/noir-lang/noir/compare/v0.27.0...v0.28.0) (2024-04-24) + + +### ⚠ BREAKING CHANGES + +* Add `as_array` and remove `_slice` variants of hash functions ([#4675](https://github.com/noir-lang/noir/issues/4675)) +* reserve keyword `super` ([#4836](https://github.com/noir-lang/noir/issues/4836)) +* contract interfaces and better function calls (https://github.com/AztecProtocol/aztec-packages/pull/5687) +* change backend width to 4 (https://github.com/AztecProtocol/aztec-packages/pull/5374) +* Use fixed size arrays in black box functions where sizes are known (https://github.com/AztecProtocol/aztec-packages/pull/5620) +* trap with revert data (https://github.com/AztecProtocol/aztec-packages/pull/5732) +* **acir:** BrilligCall opcode (https://github.com/AztecProtocol/aztec-packages/pull/5709) +* remove fixed-length keccak256 (https://github.com/AztecProtocol/aztec-packages/pull/5617) +* storage_layout and `#[aztec(storage)]` (https://github.com/AztecProtocol/aztec-packages/pull/5387) +* **acir:** Add predicate to call opcode (https://github.com/AztecProtocol/aztec-packages/pull/5616) +* contract_abi-exports (https://github.com/AztecProtocol/aztec-packages/pull/5386) + +### Features + +* **acir_gen:** Brillig stdlib ([#4848](https://github.com/noir-lang/noir/issues/4848)) ([0c8175c](https://github.com/noir-lang/noir/commit/0c8175cb539efd9427c73ae5af0d48abe688ebab)) +* **acir:** Add predicate to call opcode (https://github.com/AztecProtocol/aztec-packages/pull/5616) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* **acir:** BrilligCall opcode (https://github.com/AztecProtocol/aztec-packages/pull/5709) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Add `min` 
and `max` functions to the stdlib ([#4839](https://github.com/noir-lang/noir/issues/4839)) ([6cfb328](https://github.com/noir-lang/noir/commit/6cfb328d0d162eaa20ad1a118d085e03a52d049d)) +* Add `NARGO_FOREIGN_CALL_TIMEOUT` environment variable ([#4780](https://github.com/noir-lang/noir/issues/4780)) ([791f1c8](https://github.com/noir-lang/noir/commit/791f1c8522d49972dad4eb940f9cad437e28b25b)) +* Add comptime Interpreter ([#4821](https://github.com/noir-lang/noir/issues/4821)) ([5992436](https://github.com/noir-lang/noir/commit/599243633281e6827f0f4f095fb12d313e0125fa)) +* Add return values to aztec fns (https://github.com/AztecProtocol/aztec-packages/pull/5389) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Allow numeric generics to non inlined ACIR functions ([#4834](https://github.com/noir-lang/noir/issues/4834)) ([9cc03a4](https://github.com/noir-lang/noir/commit/9cc03a4d6f714a1b2d31c6982eb8e791ba5c869c)) +* **avm:** Integrate AVM with initializers (https://github.com/AztecProtocol/aztec-packages/pull/5469) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Brillig heterogeneous memory cells (https://github.com/AztecProtocol/aztec-packages/pull/5608) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Brillig pointer codegen and execution (https://github.com/AztecProtocol/aztec-packages/pull/5737) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Change backend width to 4 (https://github.com/AztecProtocol/aztec-packages/pull/5374) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Contract interfaces and better function calls (https://github.com/AztecProtocol/aztec-packages/pull/5687) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Contract_abi-exports (https://github.com/AztecProtocol/aztec-packages/pull/5386) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* **experimental:** Add `comptime` keyword ([#4840](https://github.com/noir-lang/noir/issues/4840)) ([4dfd7f0](https://github.com/noir-lang/noir/commit/4dfd7f03bc1b9cf57f5829c435a560bed53b7f46)) +* Get last mock oracles params ([#4789](https://github.com/noir-lang/noir/issues/4789)) ([1d96937](https://github.com/noir-lang/noir/commit/1d96937a8e94a91c0c17c97102498d067fca76c3)) +* Impl of missing functionality in new key store (https://github.com/AztecProtocol/aztec-packages/pull/5750) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Implement `Eq` trait on `BoundedVec` ([#4830](https://github.com/noir-lang/noir/issues/4830)) ([6cefe16](https://github.com/noir-lang/noir/commit/6cefe16deb643951c0cc552d08e22272900ed456)) +* Lalrpop lexer prototype ([#4656](https://github.com/noir-lang/noir/issues/4656)) ([25ad018](https://github.com/noir-lang/noir/commit/25ad018a55b61dd861e899f050c48200f0a00430)) +* **nargo:** Handle call stacks for multiple Acir calls ([#4711](https://github.com/noir-lang/noir/issues/4711)) ([5b23171](https://github.com/noir-lang/noir/commit/5b231714740447d82cde7cdbe65d4a8b46a31df4)) +* Narrow ABI encoding errors down to target problem argument/field ([#4798](https://github.com/noir-lang/noir/issues/4798)) ([e412e6e](https://github.com/noir-lang/noir/commit/e412e6e30910472b9d5f9000370ce5138ad39ce7)) +* Proving the rollup circuits 
(https://github.com/AztecProtocol/aztec-packages/pull/5599) ([5b352d6](https://github.com/noir-lang/noir/commit/5b352d6266c40522f5626f79d2f36a409b482aaa)) +* Reserve keyword `super` ([#4836](https://github.com/noir-lang/noir/issues/4836)) ([d5028a6](https://github.com/noir-lang/noir/commit/d5028a613e5a65ad1286dd20ce0fb0313f19f6ee)) +* Restore hashing args via slice for performance (https://github.com/AztecProtocol/aztec-packages/pull/5539) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Simplify `BoundedVec::eq` ([#4838](https://github.com/noir-lang/noir/issues/4838)) ([3d33a33](https://github.com/noir-lang/noir/commit/3d33a33e74c3e7d0fc511059b07f0ef9ddd9b667)) +* **simulator:** Fetch return values at circuit execution (https://github.com/AztecProtocol/aztec-packages/pull/5642) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Split `backend_barretenburg` into prover and verifier classes ([#4769](https://github.com/noir-lang/noir/issues/4769)) ([ce1e662](https://github.com/noir-lang/noir/commit/ce1e6624ece3c91f06b0273af9ba88e703c1b589)) +* Storage_layout and `#[aztec(storage)]` (https://github.com/AztecProtocol/aztec-packages/pull/5387) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5572) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5619) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5697) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5725) ([5b352d6](https://github.com/noir-lang/noir/commit/5b352d6266c40522f5626f79d2f36a409b482aaa)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5794) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5814) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5935) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5955) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5999) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Trap with revert data (https://github.com/AztecProtocol/aztec-packages/pull/5732) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Unroll loops iteratively ([#4779](https://github.com/noir-lang/noir/issues/4779)) ([f831b0b](https://github.com/noir-lang/noir/commit/f831b0bdbf99cab1bcd24d494c4546a36309465e)) +* Use fixed size arrays in black box functions where sizes are known (https://github.com/AztecProtocol/aztec-packages/pull/5620) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Variable length returns 
(https://github.com/AztecProtocol/aztec-packages/pull/5633) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) + + +### Bug Fixes + +* ArrayGet and Set are not pure ([#4783](https://github.com/noir-lang/noir/issues/4783)) ([90ee479](https://github.com/noir-lang/noir/commit/90ee4792c8e7115e55a3b1dadd1e43066ad8ac66)) +* Avoid huge unrolling in hash_args (https://github.com/AztecProtocol/aztec-packages/pull/5703) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Catch panics from EC point creation (e.g. the point is at infinity) ([#4790](https://github.com/noir-lang/noir/issues/4790)) ([645dba1](https://github.com/noir-lang/noir/commit/645dba192f16ef34018828186ffb297422a8dc73)) +* Don't reuse brillig with slice arguments (https://github.com/AztecProtocol/aztec-packages/pull/5800) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* **experimental:** Skip over comptime functions in scan pass ([#4893](https://github.com/noir-lang/noir/issues/4893)) ([f267d42](https://github.com/noir-lang/noir/commit/f267d4205b46317eacb1c247c9dca0e7698d1259)) +* Fix curve parameters for bigints ([#4900](https://github.com/noir-lang/noir/issues/4900)) ([5985e42](https://github.com/noir-lang/noir/commit/5985e4285de9e29f7c986103a49fdaec59228887)) +* Fix panic when returning a zeroed unit value ([#4797](https://github.com/noir-lang/noir/issues/4797)) ([2ea9292](https://github.com/noir-lang/noir/commit/2ea92926956658ea99d8fb97734831eba00d3a4b)) +* Issue 4682 and add solver for unconstrained bigintegers ([#4729](https://github.com/noir-lang/noir/issues/4729)) ([e4d33c1](https://github.com/noir-lang/noir/commit/e4d33c126a2795d9aaa6048d4e91b64cb4bbe4f2)) +* Primary_message typo in errors.rs (https://github.com/AztecProtocol/aztec-packages/pull/5646) ([5b352d6](https://github.com/noir-lang/noir/commit/5b352d6266c40522f5626f79d2f36a409b482aaa)) +* Proper field inversion for bigints ([#4802](https://github.com/noir-lang/noir/issues/4802)) ([b46d0e3](https://github.com/noir-lang/noir/commit/b46d0e39f4252f8bbaa987f88d112e4c233b3d61)) +* Reset the noir-gates-diff report on master ([#4878](https://github.com/noir-lang/noir/issues/4878)) ([50bc325](https://github.com/noir-lang/noir/commit/50bc32587a837c930ed14175c98ace1530c54bef)) +* Update noir-gates-diff commit to use master reference report ([#4891](https://github.com/noir-lang/noir/issues/4891)) ([4a3ffb7](https://github.com/noir-lang/noir/commit/4a3ffb7b4c5cdd5fcadb19e7f251b1ee27b0c02b)) + + +### Miscellaneous Chores + +* Add `as_array` and remove `_slice` variants of hash functions ([#4675](https://github.com/noir-lang/noir/issues/4675)) ([8e39706](https://github.com/noir-lang/noir/commit/8e39706cbb51f27b42fbe851aaa6a67070d07c74)) +* Remove fixed-length keccak256 (https://github.com/AztecProtocol/aztec-packages/pull/5617) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) + ## [0.27.0](https://github.com/noir-lang/noir/compare/v0.26.0...v0.27.0) (2024-04-10) diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 9c9e13b57c9..8a8ccfdbf8a 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -4,7 +4,7 @@ version = 3 [[package]] name = "acir" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir_field", "base64 0.21.2", @@ -26,7 +26,7 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.43.0" +version = "0.44.0" dependencies = [ 
"ark-bls12-381", "ark-bn254", @@ -40,7 +40,7 @@ dependencies = [ [[package]] name = "acvm" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -56,7 +56,7 @@ dependencies = [ [[package]] name = "acvm_blackbox_solver" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir", "blake2", @@ -92,7 +92,7 @@ dependencies = [ [[package]] name = "acvm_js" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acvm", "bn254_blackbox_solver", @@ -235,7 +235,7 @@ checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] name = "arena" -version = "0.27.0" +version = "0.28.0" [[package]] name = "ark-bls12-381" @@ -445,7 +445,7 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "aztec_macros" -version = "0.27.0" +version = "0.28.0" dependencies = [ "convert_case 0.6.0", "iter-extended", @@ -456,7 +456,7 @@ dependencies = [ [[package]] name = "backend-interface" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "bb_abstraction_leaks", @@ -612,7 +612,7 @@ dependencies = [ [[package]] name = "bn254_blackbox_solver" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -634,7 +634,7 @@ dependencies = [ [[package]] name = "brillig" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir_field", "serde", @@ -642,7 +642,7 @@ dependencies = [ [[package]] name = "brillig_vm" -version = "0.43.0" +version = "0.44.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -1766,7 +1766,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.27.0" +version = "0.28.0" dependencies = [ "codespan-reporting", "iter-extended", @@ -2387,7 +2387,7 @@ dependencies = [ [[package]] name = "iter-extended" -version = "0.27.0" +version = "0.28.0" [[package]] name = "itertools" @@ -2812,7 +2812,7 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "codespan-reporting", @@ -2838,7 +2838,7 @@ dependencies = [ [[package]] name = "nargo_cli" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "assert_cmd", @@ -2892,7 +2892,7 @@ dependencies = [ [[package]] name = "nargo_fmt" -version = "0.27.0" +version = "0.28.0" dependencies = [ "bytecount", "noirc_frontend", @@ -2904,7 +2904,7 @@ dependencies = [ [[package]] name = "nargo_toml" -version = "0.27.0" +version = "0.28.0" dependencies = [ "dirs", "fm", @@ -2983,7 +2983,7 @@ dependencies = [ [[package]] name = "noir_debugger" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "assert_cmd", @@ -3018,7 +3018,7 @@ dependencies = [ [[package]] name = "noir_lsp" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "async-lsp", @@ -3044,7 +3044,7 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "build-data", @@ -3067,7 +3067,7 @@ dependencies = [ [[package]] name = "noirc_abi" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "iter-extended", @@ -3084,7 +3084,7 @@ dependencies = [ [[package]] name = "noirc_abi_wasm" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "build-data", @@ -3101,7 +3101,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "aztec_macros", @@ -3122,7 +3122,7 @@ dependencies = [ [[package]] name = "noirc_errors" -version = "0.27.0" +version = "0.28.0" dependencies 
= [ "acvm", "base64 0.21.2", @@ -3140,7 +3140,7 @@ dependencies = [ [[package]] name = "noirc_evaluator" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "chrono", @@ -3157,7 +3157,7 @@ dependencies = [ [[package]] name = "noirc_frontend" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "arena", @@ -3186,7 +3186,7 @@ dependencies = [ [[package]] name = "noirc_printable_type" -version = "0.27.0" +version = "0.28.0" dependencies = [ "acvm", "iter-extended", diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 132ff181e44..108d179b9ca 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -41,7 +41,7 @@ resolver = "2" [workspace.package] # x-release-please-start-version -version = "0.27.0" +version = "0.28.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" @@ -52,13 +52,13 @@ repository = "https://github.com/noir-lang/noir/" [workspace.dependencies] # ACVM workspace dependencies -acir_field = { version = "0.43.0", path = "acvm-repo/acir_field", default-features = false } -acir = { version = "0.43.0", path = "acvm-repo/acir", default-features = false } -acvm = { version = "0.43.0", path = "acvm-repo/acvm" } -brillig = { version = "0.43.0", path = "acvm-repo/brillig", default-features = false } -brillig_vm = { version = "0.43.0", path = "acvm-repo/brillig_vm", default-features = false } -acvm_blackbox_solver = { version = "0.43.0", path = "acvm-repo/blackbox_solver", default-features = false } -bn254_blackbox_solver = { version = "0.43.0", path = "acvm-repo/bn254_blackbox_solver", default-features = false } +acir_field = { version = "0.44.0", path = "acvm-repo/acir_field", default-features = false } +acir = { version = "0.44.0", path = "acvm-repo/acir", default-features = false } +acvm = { version = "0.44.0", path = "acvm-repo/acvm" } +brillig = { version = "0.44.0", path = "acvm-repo/brillig", default-features = false } +brillig_vm = { version = "0.44.0", path = "acvm-repo/brillig_vm", default-features = false } +acvm_blackbox_solver = { version = "0.44.0", path = "acvm-repo/blackbox_solver", default-features = false } +bn254_blackbox_solver = { version = "0.44.0", path = "acvm-repo/bn254_blackbox_solver", default-features = false } # Noir compiler workspace dependencies arena = { path = "compiler/utils/arena" } diff --git a/noir/noir-repo/acvm-repo/CHANGELOG.md b/noir/noir-repo/acvm-repo/CHANGELOG.md index 9d9ff539559..b7b8ef3c474 100644 --- a/noir/noir-repo/acvm-repo/CHANGELOG.md +++ b/noir/noir-repo/acvm-repo/CHANGELOG.md @@ -5,6 +5,125 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [0.44.0](https://github.com/noir-lang/noir/compare/v0.43.0...v0.44.0) (2024-04-24) + + +### ⚠ BREAKING CHANGES + +* contract interfaces and better function calls (https://github.com/AztecProtocol/aztec-packages/pull/5687) +* change backend width to 4 (https://github.com/AztecProtocol/aztec-packages/pull/5374) +* Use fixed size arrays in black box functions where sizes are known (https://github.com/AztecProtocol/aztec-packages/pull/5620) +* trap with revert data (https://github.com/AztecProtocol/aztec-packages/pull/5732) +* **acir:** BrilligCall opcode (https://github.com/AztecProtocol/aztec-packages/pull/5709) +* remove fixed-length keccak256 (https://github.com/AztecProtocol/aztec-packages/pull/5617) +* storage_layout and `#[aztec(storage)]` (https://github.com/AztecProtocol/aztec-packages/pull/5387) +* **acir:** Add predicate to call opcode (https://github.com/AztecProtocol/aztec-packages/pull/5616) +* contract_abi-exports (https://github.com/AztecProtocol/aztec-packages/pull/5386) +* Brillig typed memory (https://github.com/AztecProtocol/aztec-packages/pull/5395) +* **acir:** Program and witness stack structure (https://github.com/AztecProtocol/aztec-packages/pull/5149) +* automatic NoteInterface and NoteGetterOptions auto select (https://github.com/AztecProtocol/aztec-packages/pull/4508) +* Acir call opcode (https://github.com/AztecProtocol/aztec-packages/pull/4773) +* Support contracts with no constructor (https://github.com/AztecProtocol/aztec-packages/pull/5175) +* Internal as a macro (https://github.com/AztecProtocol/aztec-packages/pull/4898) +* move noir out of yarn-project (https://github.com/AztecProtocol/aztec-packages/pull/4479) +* note type ids (https://github.com/AztecProtocol/aztec-packages/pull/4500) +* rename bigint_neg into bigint_sub (https://github.com/AztecProtocol/aztec-packages/pull/4420) +* Add expression width into acir (https://github.com/AztecProtocol/aztec-packages/pull/4014) +* init storage macro (https://github.com/AztecProtocol/aztec-packages/pull/4200) +* **acir:** Move `is_recursive` flag to be part of the circuit definition (https://github.com/AztecProtocol/aztec-packages/pull/4221) +* Sync commits from `aztec-packages` ([#4144](https://github.com/noir-lang/noir/issues/4144)) +* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) + +### Features + +* Acir call opcode (https://github.com/AztecProtocol/aztec-packages/pull/4773) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* **acir_gen:** Brillig stdlib ([#4848](https://github.com/noir-lang/noir/issues/4848)) ([0c8175c](https://github.com/noir-lang/noir/commit/0c8175cb539efd9427c73ae5af0d48abe688ebab)) +* **acir_gen:** Fold attribute at compile-time and initial non inlined ACIR (https://github.com/AztecProtocol/aztec-packages/pull/5341) ([a0f7474](https://github.com/noir-lang/noir/commit/a0f7474ae6bd74132efdb945d2eb2383f3913cce)) +* **acir:** Add predicate to call opcode (https://github.com/AztecProtocol/aztec-packages/pull/5616) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* **acir:** BrilligCall opcode (https://github.com/AztecProtocol/aztec-packages/pull/5709) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* **acir:** Program and witness stack structure (https://github.com/AztecProtocol/aztec-packages/pull/5149) ([13eb71b](https://github.com/noir-lang/noir/commit/13eb71b8de44eb6aad9c37943ad06fc73db589f5)) +* 
**acvm_js:** Execute program ([#4694](https://github.com/noir-lang/noir/issues/4694)) ([386f6d0](https://github.com/noir-lang/noir/commit/386f6d0a5822912db878285cb001032a7c0ff622)) +* **acvm:** Execute multiple circuits (https://github.com/AztecProtocol/aztec-packages/pull/5380) ([a0f7474](https://github.com/noir-lang/noir/commit/a0f7474ae6bd74132efdb945d2eb2383f3913cce)) +* Add bit size to const opcode (https://github.com/AztecProtocol/aztec-packages/pull/4385) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Add CMOV instruction to brillig and brillig gen (https://github.com/AztecProtocol/aztec-packages/pull/5308) ([13eb71b](https://github.com/noir-lang/noir/commit/13eb71b8de44eb6aad9c37943ad06fc73db589f5)) +* Add expression width into acir (https://github.com/AztecProtocol/aztec-packages/pull/4014) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Add instrumentation for tracking variables in debugging ([#4122](https://github.com/noir-lang/noir/issues/4122)) ([c58d691](https://github.com/noir-lang/noir/commit/c58d69141b54a918cd1675400c00bfd48720f896)) +* Add poseidon2 opcode implementation for acvm/brillig, and Noir ([#4398](https://github.com/noir-lang/noir/issues/4398)) ([10e8292](https://github.com/noir-lang/noir/commit/10e82920798380f50046e52db4a20ca205191ab7)) +* Add return values to aztec fns (https://github.com/AztecProtocol/aztec-packages/pull/5389) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Add support for overriding expression width ([#4117](https://github.com/noir-lang/noir/issues/4117)) ([c8026d5](https://github.com/noir-lang/noir/commit/c8026d557d535b10fe455165d6445076df7a03de)) +* Added cast opcode and cast calldata (https://github.com/AztecProtocol/aztec-packages/pull/4423) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Allow brillig to read arrays directly from memory (https://github.com/AztecProtocol/aztec-packages/pull/4460) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Allow nested arrays and vectors in Brillig foreign calls (https://github.com/AztecProtocol/aztec-packages/pull/4478) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Allow variables and stack trace inspection in the debugger ([#4184](https://github.com/noir-lang/noir/issues/4184)) ([bf263fc](https://github.com/noir-lang/noir/commit/bf263fc8d843940f328a90f6366edd2671fb2682)) +* Automatic NoteInterface and NoteGetterOptions auto select (https://github.com/AztecProtocol/aztec-packages/pull/4508) ([13eb71b](https://github.com/noir-lang/noir/commit/13eb71b8de44eb6aad9c37943ad06fc73db589f5)) +* **avm:** Back in avm context with macro - refactor context (https://github.com/AztecProtocol/aztec-packages/pull/4438) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* **avm:** Brillig CONST of size > u128 (https://github.com/AztecProtocol/aztec-packages/pull/5217) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* **avm:** Integrate AVM with initializers (https://github.com/AztecProtocol/aztec-packages/pull/5469) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* **aztec-nr:** Initial work for aztec public vm macro (https://github.com/AztecProtocol/aztec-packages/pull/4400) 
([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Backpropagate constants in ACIR during optimization ([#3926](https://github.com/noir-lang/noir/issues/3926)) ([aad0da0](https://github.com/noir-lang/noir/commit/aad0da024c69663f42e6913e674682d5864b26ae)) +* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) ([5be049e](https://github.com/noir-lang/noir/commit/5be049eee6c342649462282ee04f6411e6ea392c)) +* Brillig heterogeneous memory cells (https://github.com/AztecProtocol/aztec-packages/pull/5608) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Brillig IR refactor (https://github.com/AztecProtocol/aztec-packages/pull/5233) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Brillig pointer codegen and execution (https://github.com/AztecProtocol/aztec-packages/pull/5737) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Brillig typed memory (https://github.com/AztecProtocol/aztec-packages/pull/5395) ([0bc18c4](https://github.com/noir-lang/noir/commit/0bc18c4f78171590dd58bded959f68f53a44cc8c)) +* Change backend width to 4 (https://github.com/AztecProtocol/aztec-packages/pull/5374) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Check initializer msg.sender matches deployer from address preimage (https://github.com/AztecProtocol/aztec-packages/pull/5222) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Contract interfaces and better function calls (https://github.com/AztecProtocol/aztec-packages/pull/5687) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Contract_abi-exports (https://github.com/AztecProtocol/aztec-packages/pull/5386) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Evaluation of dynamic assert messages ([#4101](https://github.com/noir-lang/noir/issues/4101)) ([c284e01](https://github.com/noir-lang/noir/commit/c284e01bfe20ceae4414dc123624b5cbb8b66d09)) +* Impl of missing functionality in new key store (https://github.com/AztecProtocol/aztec-packages/pull/5750) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Init storage macro (https://github.com/AztecProtocol/aztec-packages/pull/4200) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Initial Earthly CI (https://github.com/AztecProtocol/aztec-packages/pull/5069) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Internal as a macro (https://github.com/AztecProtocol/aztec-packages/pull/4898) ([5f57ebb](https://github.com/noir-lang/noir/commit/5f57ebb7ff4b810802f90699a10f4325ef904f2e)) +* **nargo:** Handle call stacks for multiple Acir calls ([#4711](https://github.com/noir-lang/noir/issues/4711)) ([5b23171](https://github.com/noir-lang/noir/commit/5b231714740447d82cde7cdbe65d4a8b46a31df4)) +* New brillig field operations and refactor of binary operations (https://github.com/AztecProtocol/aztec-packages/pull/5208) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Note type ids (https://github.com/AztecProtocol/aztec-packages/pull/4500) 
([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Remove range constraints from witnesses which are constrained to be constants ([#3928](https://github.com/noir-lang/noir/issues/3928)) ([afe9c7a](https://github.com/noir-lang/noir/commit/afe9c7a38bb9d4245205d3aa46d4ce23d70a5671)) +* Remove replacement of boolean range opcodes with `AssertZero` opcodes ([#4107](https://github.com/noir-lang/noir/issues/4107)) ([dac0e87](https://github.com/noir-lang/noir/commit/dac0e87ee3be3446b92bbb12ef4832fd493fcee3)) +* Restore hashing args via slice for performance (https://github.com/AztecProtocol/aztec-packages/pull/5539) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Signed integer division and modulus in brillig gen (https://github.com/AztecProtocol/aztec-packages/pull/5279) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* **simulator:** Fetch return values at circuit execution (https://github.com/AztecProtocol/aztec-packages/pull/5642) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Storage_layout and `#[aztec(storage)]` (https://github.com/AztecProtocol/aztec-packages/pull/5387) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Support contracts with no constructor (https://github.com/AztecProtocol/aztec-packages/pull/5175) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Sync `aztec-packages` ([#4011](https://github.com/noir-lang/noir/issues/4011)) ([fee2452](https://github.com/noir-lang/noir/commit/fee24523c427c27f0bdaf98ea09a852a2da3e94c)) +* Sync commits from `aztec-packages` ([#4068](https://github.com/noir-lang/noir/issues/4068)) ([7a8f3a3](https://github.com/noir-lang/noir/commit/7a8f3a33b57875e681e3d81e667e3570a1cdbdcc)) +* Sync commits from `aztec-packages` ([#4144](https://github.com/noir-lang/noir/issues/4144)) ([0205d3b](https://github.com/noir-lang/noir/commit/0205d3b4ad0cf5ffd775a43eb5af273a772cf138)) +* Sync from aztec-packages ([#4483](https://github.com/noir-lang/noir/issues/4483)) ([fe8f277](https://github.com/noir-lang/noir/commit/fe8f2776ccfde29209a2c3fc162311c99e4f59be)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5234) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5286) ([c3c9e19](https://github.com/noir-lang/noir/commit/c3c9e19a20d61272a04b95fd6c7d34cc4cb96e45)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5572) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5619) ([2bd006a](https://github.com/noir-lang/noir/commit/2bd006ae07499e8702b0fa9565855f0a5ef1a589)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5697) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5794) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5814) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Sync from noir 
(https://github.com/AztecProtocol/aztec-packages/pull/5935) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5955) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Sync from noir (https://github.com/AztecProtocol/aztec-packages/pull/5999) ([1b867b1](https://github.com/noir-lang/noir/commit/1b867b121fba5db3087ca845b4934e6732b23fd1)) +* Trap with revert data (https://github.com/AztecProtocol/aztec-packages/pull/5732) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Use fixed size arrays in black box functions where sizes are known (https://github.com/AztecProtocol/aztec-packages/pull/5620) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Variable length returns (https://github.com/AztecProtocol/aztec-packages/pull/5633) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) + + +### Bug Fixes + +* **acvm:** Mark outputs of Opcode::Call solvable ([#4708](https://github.com/noir-lang/noir/issues/4708)) ([8fea405](https://github.com/noir-lang/noir/commit/8fea40576f262bd5bb588923c0660d8967404e56)) +* Avoid huge unrolling in hash_args (https://github.com/AztecProtocol/aztec-packages/pull/5703) ([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Catch panics from EC point creation (e.g. the point is at infinity) ([#4790](https://github.com/noir-lang/noir/issues/4790)) ([645dba1](https://github.com/noir-lang/noir/commit/645dba192f16ef34018828186ffb297422a8dc73)) +* Don't reuse brillig with slice arguments (https://github.com/AztecProtocol/aztec-packages/pull/5800) ([0f9ae0a](https://github.com/noir-lang/noir/commit/0f9ae0ac1d68714b56ba4524aedcc67212494f1b)) +* Issue 4682 and add solver for unconstrained bigintegers ([#4729](https://github.com/noir-lang/noir/issues/4729)) ([e4d33c1](https://github.com/noir-lang/noir/commit/e4d33c126a2795d9aaa6048d4e91b64cb4bbe4f2)) +* Noir test incorrect reporting (https://github.com/AztecProtocol/aztec-packages/pull/4925) ([5f57ebb](https://github.com/noir-lang/noir/commit/5f57ebb7ff4b810802f90699a10f4325ef904f2e)) +* Proper field inversion for bigints ([#4802](https://github.com/noir-lang/noir/issues/4802)) ([b46d0e3](https://github.com/noir-lang/noir/commit/b46d0e39f4252f8bbaa987f88d112e4c233b3d61)) +* Remove panic from `init_log_level` in `acvm_js` ([#4195](https://github.com/noir-lang/noir/issues/4195)) ([2e26530](https://github.com/noir-lang/noir/commit/2e26530bf53006c1ed4fee310bcaa905c95dd95b)) +* Return error rather instead of panicking on invalid circuit ([#3976](https://github.com/noir-lang/noir/issues/3976)) ([67201bf](https://github.com/noir-lang/noir/commit/67201bfc21a9c8858aa86be9cd47d463fb78d925)) + + +### Miscellaneous Chores + +* **acir:** Move `is_recursive` flag to be part of the circuit definition (https://github.com/AztecProtocol/aztec-packages/pull/4221) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) +* Move noir out of yarn-project (https://github.com/AztecProtocol/aztec-packages/pull/4479) ([78ef013](https://github.com/noir-lang/noir/commit/78ef0134b82e76a73dadb6c7975def22290e3a1a)) +* Remove fixed-length keccak256 (https://github.com/AztecProtocol/aztec-packages/pull/5617) 
([305bcdc](https://github.com/noir-lang/noir/commit/305bcdcbd01cb84dbaac900f14cb6cf867f83bda)) +* Rename bigint_neg into bigint_sub (https://github.com/AztecProtocol/aztec-packages/pull/4420) ([158c8ce](https://github.com/noir-lang/noir/commit/158c8cec7f0dc698042e9512001dd2c9d6b40bcc)) + ## [0.43.0](https://github.com/noir-lang/noir/compare/v0.42.0...v0.43.0) (2024-04-10) diff --git a/noir/noir-repo/acvm-repo/acir/Cargo.toml b/noir/noir-repo/acvm-repo/acir/Cargo.toml index d6990f83281..96ba13e3b3b 100644 --- a/noir/noir-repo/acvm-repo/acir/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acir/Cargo.toml @@ -2,7 +2,7 @@ name = "acir" description = "ACIR is the IR that the VM processes, it is analogous to LLVM IR" # x-release-please-start-version -version = "0.43.0" +version = "0.44.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/acir_field/Cargo.toml b/noir/noir-repo/acvm-repo/acir_field/Cargo.toml index 7a260ea1fa2..fcbe80ded2d 100644 --- a/noir/noir-repo/acvm-repo/acir_field/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acir_field/Cargo.toml @@ -2,7 +2,7 @@ name = "acir_field" description = "The field implementation being used by ACIR." # x-release-please-start-version -version = "0.43.0" +version = "0.44.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/acvm/Cargo.toml b/noir/noir-repo/acvm-repo/acvm/Cargo.toml index e6554d3f773..0061eec5bc8 100644 --- a/noir/noir-repo/acvm-repo/acvm/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acvm/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm" description = "The virtual machine that processes ACIR given a backend/proof system." # x-release-please-start-version -version = "0.43.0" +version = "0.44.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs index 67faf7f5007..10178465d58 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs @@ -30,19 +30,6 @@ pub struct BrilligSolver<'b, B: BlackBoxFunctionSolver> { } impl<'b, B: BlackBoxFunctionSolver> BrilligSolver<'b, B> { - /// Evaluates if the Brillig block should be skipped entirely - pub(super) fn should_skip( - witness: &WitnessMap, - brillig: &Brillig, - ) -> Result { - // If the predicate is `None`, the block should never be skipped - // If the predicate is `Some` but we cannot find a value, then we return stalled - match &brillig.predicate { - Some(pred) => Ok(get_value(pred, witness)?.is_zero()), - None => Ok(false), - } - } - /// Assigns the zero value to all outputs of the given [`Brillig`] bytecode. 
pub(super) fn zero_out_brillig_outputs( initial_witness: &mut WitnessMap, diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs index 652e173867a..3d3c52c661b 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs @@ -430,7 +430,7 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { let Opcode::BrilligCall { id, inputs, outputs, predicate } = &self.opcodes[self.instruction_pointer] else { - unreachable!("Not executing a Brillig opcode"); + unreachable!("Not executing a BrilligCall opcode"); }; let witness = &mut self.witness_map; @@ -468,27 +468,28 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { } } - pub fn step_into_brillig_opcode(&mut self) -> StepResult<'a, B> { - let Opcode::Brillig(brillig) = &self.opcodes[self.instruction_pointer] else { + pub fn step_into_brillig(&mut self) -> StepResult<'a, B> { + let Opcode::BrilligCall { id, inputs, outputs, predicate } = + &self.opcodes[self.instruction_pointer] + else { return StepResult::Status(self.solve_opcode()); }; let witness = &mut self.witness_map; - let should_skip = match BrilligSolver::::should_skip(witness, brillig) { + let should_skip = match is_predicate_false(witness, predicate) { Ok(result) => result, Err(err) => return StepResult::Status(self.handle_opcode_resolution(Err(err))), }; - if should_skip { - let resolution = - BrilligSolver::::zero_out_brillig_outputs(witness, &brillig.outputs); + let resolution = BrilligSolver::::zero_out_brillig_outputs(witness, outputs); return StepResult::Status(self.handle_opcode_resolution(resolution)); } - let solver = BrilligSolver::new( + let solver = BrilligSolver::new_call( witness, &self.block_solvers, - brillig, + inputs, + &self.unconstrained_functions[*id as usize].bytecode, self.backend, self.instruction_pointer, ); @@ -499,8 +500,8 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { } pub fn finish_brillig_with_solver(&mut self, solver: BrilligSolver<'a, B>) -> ACVMStatus { - if !matches!(&self.opcodes[self.instruction_pointer], Opcode::Brillig(..)) { - unreachable!("Not executing a Brillig opcode"); + if !matches!(self.opcodes[self.instruction_pointer], Opcode::BrilligCall { .. 
}) { + unreachable!("Not executing a Brillig/BrilligCall opcode"); } self.brillig_solver = Some(solver); self.solve_opcode() diff --git a/noir/noir-repo/acvm-repo/acvm_js/Cargo.toml b/noir/noir-repo/acvm-repo/acvm_js/Cargo.toml index 4635dc8663e..b2db54a4e65 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acvm_js/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_js" description = "Typescript wrapper around the ACVM allowing execution of ACIR code" # x-release-please-start-version -version = "0.43.0" +version = "0.44.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/acvm_js/package.json b/noir/noir-repo/acvm-repo/acvm_js/package.json index 63f12942018..7da8bf84b02 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/package.json +++ b/noir/noir-repo/acvm-repo/acvm_js/package.json @@ -1,6 +1,6 @@ { "name": "@noir-lang/acvm_js", - "version": "0.43.0", + "version": "0.44.0", "publishConfig": { "access": "public" }, diff --git a/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml b/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml index 1d6629c8223..893bed38905 100644 --- a/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml +++ b/noir/noir-repo/acvm-repo/blackbox_solver/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_blackbox_solver" description = "A solver for the blackbox functions found in ACIR and Brillig" # x-release-please-start-version -version = "0.43.0" +version = "0.44.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml index 448642e1a9e..d856a57eb9b 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -2,7 +2,7 @@ name = "bn254_blackbox_solver" description = "Solvers for black box functions which are specific for the bn254 curve" # x-release-please-start-version -version = "0.43.0" +version = "0.44.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/brillig/Cargo.toml b/noir/noir-repo/acvm-repo/brillig/Cargo.toml index 463f6286d6b..41c2cebdad9 100644 --- a/noir/noir-repo/acvm-repo/brillig/Cargo.toml +++ b/noir/noir-repo/acvm-repo/brillig/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig" description = "Brillig is the bytecode ACIR uses for non-determinism." 
# x-release-please-start-version -version = "0.43.0" +version = "0.44.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/acvm-repo/brillig_vm/Cargo.toml b/noir/noir-repo/acvm-repo/brillig_vm/Cargo.toml index 67e16c21d8b..3dcc05c0842 100644 --- a/noir/noir-repo/acvm-repo/brillig_vm/Cargo.toml +++ b/noir/noir-repo/acvm-repo/brillig_vm/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig_vm" description = "The virtual machine that processes Brillig bytecode, used to introduce non-determinism to the ACVM" # x-release-please-start-version -version = "0.43.0" +version = "0.44.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index e591a3d478c..276a8247cea 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -566,22 +566,12 @@ impl<'a> FunctionContext<'a> { mut rhs: ValueId, location: Location, ) -> Values { - let result_type = self.builder.type_of_value(lhs); - let mut result = match operator { - BinaryOpKind::Equal | BinaryOpKind::NotEqual - if matches!(result_type, Type::Array(..)) => - { - return self.insert_array_equality(lhs, operator, rhs, location) - } - _ => { - let op = convert_operator(operator); - if operator_requires_swapped_operands(operator) { - std::mem::swap(&mut lhs, &mut rhs); - } + let op = convert_operator(operator); + if operator_requires_swapped_operands(operator) { + std::mem::swap(&mut lhs, &mut rhs); + } - self.builder.set_location(location).insert_binary(lhs, op, rhs) - } - }; + let mut result = self.builder.set_location(location).insert_binary(lhs, op, rhs); // Check for integer overflow if matches!( @@ -600,94 +590,6 @@ impl<'a> FunctionContext<'a> { result.into() } - /// The frontend claims to support equality (==) on arrays, so we must support it in SSA here. - /// The actual BinaryOp::Eq in SSA is meant only for primitive numeric types so we encode an - /// entire equality loop on each array element. The generated IR is as follows: - /// - /// ... 
- /// result_alloc = allocate - /// store u1 1 in result_alloc - /// jmp loop_start(0) - /// loop_start(i: Field): - /// v0 = lt i, array_len - /// jmpif v0, then: loop_body, else: loop_end - /// loop_body(): - /// v1 = array_get lhs, index i - /// v2 = array_get rhs, index i - /// v3 = eq v1, v2 - /// v4 = load result_alloc - /// v5 = and v4, v3 - /// store v5 in result_alloc - /// v6 = add i, Field 1 - /// jmp loop_start(v6) - /// loop_end(): - /// result = load result_alloc - fn insert_array_equality( - &mut self, - lhs: ValueId, - operator: BinaryOpKind, - rhs: ValueId, - location: Location, - ) -> Values { - let lhs_type = self.builder.type_of_value(lhs); - let rhs_type = self.builder.type_of_value(rhs); - - let (array_length, element_type) = match (lhs_type, rhs_type) { - ( - Type::Array(lhs_composite_type, lhs_length), - Type::Array(rhs_composite_type, rhs_length), - ) => { - assert!( - lhs_composite_type.len() == 1 && rhs_composite_type.len() == 1, - "== is unimplemented for arrays of structs" - ); - assert_eq!(lhs_composite_type[0], rhs_composite_type[0]); - assert_eq!(lhs_length, rhs_length, "Expected two arrays of equal length"); - (lhs_length, lhs_composite_type[0].clone()) - } - _ => unreachable!("Expected two array values"), - }; - - let loop_start = self.builder.insert_block(); - let loop_body = self.builder.insert_block(); - let loop_end = self.builder.insert_block(); - - // pre-loop - let result_alloc = self.builder.set_location(location).insert_allocate(Type::bool()); - let true_value = self.builder.numeric_constant(1u128, Type::bool()); - self.builder.insert_store(result_alloc, true_value); - let zero = self.builder.length_constant(0u128); - self.builder.terminate_with_jmp(loop_start, vec![zero]); - - // loop_start - self.builder.switch_to_block(loop_start); - let i = self.builder.add_block_parameter(loop_start, Type::length_type()); - let array_length = self.builder.length_constant(array_length as u128); - let v0 = self.builder.insert_binary(i, BinaryOp::Lt, array_length); - self.builder.terminate_with_jmpif(v0, loop_body, loop_end); - - // loop body - self.builder.switch_to_block(loop_body); - let v1 = self.builder.insert_array_get(lhs, i, element_type.clone()); - let v2 = self.builder.insert_array_get(rhs, i, element_type); - let v3 = self.builder.insert_binary(v1, BinaryOp::Eq, v2); - let v4 = self.builder.insert_load(result_alloc, Type::bool()); - let v5 = self.builder.insert_binary(v4, BinaryOp::And, v3); - self.builder.insert_store(result_alloc, v5); - let one = self.builder.length_constant(1u128); - let v6 = self.builder.insert_binary(i, BinaryOp::Add, one); - self.builder.terminate_with_jmp(loop_start, vec![v6]); - - // loop end - self.builder.switch_to_block(loop_end); - let mut result = self.builder.insert_load(result_alloc, Type::bool()); - - if operator_requires_not(operator) { - result = self.builder.insert_not(result); - } - result.into() - } - /// Inserts a call instruction at the end of the current block and returns the results /// of the call. 
/// diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs index 0bc7673e105..7b30777c3ba 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -890,36 +890,6 @@ impl<'interner> TypeChecker<'interner> { // <= and friends are technically valid for booleans, just not very useful (Bool, Bool) => Ok((Bool, false)), - // Special-case == and != for arrays - (Array(x_size, x_type), Array(y_size, y_type)) - if matches!(op.kind, BinaryOpKind::Equal | BinaryOpKind::NotEqual) => - { - self.unify(x_size, y_size, || TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - source: Source::ArrayLen, - span: op.location.span, - }); - - let (_, use_impl) = self.comparator_operand_type_rules(x_type, y_type, op, span)?; - - // If the size is not constant, we must fall back to a user-provided impl for - // equality on slices. - let size = x_size.follow_bindings(); - let use_impl = use_impl || size.evaluate_to_u64().is_none(); - Ok((Bool, use_impl)) - } - - (String(x_size), String(y_size)) => { - self.unify(x_size, y_size, || TypeCheckError::TypeMismatchWithSource { - expected: *x_size.clone(), - actual: *y_size.clone(), - span: op.location.span, - source: Source::StringLen, - }); - - Ok((Bool, false)) - } (lhs, rhs) => { self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { expected: lhs.clone(), diff --git a/noir/noir-repo/compiler/wasm/package.json b/noir/noir-repo/compiler/wasm/package.json index 3bcf60afbe8..e8e7c0ab3c9 100644 --- a/noir/noir-repo/compiler/wasm/package.json +++ b/noir/noir-repo/compiler/wasm/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.27.0", + "version": "0.28.0", "license": "(MIT OR Apache-2.0)", "main": "dist/main.js", "types": "./dist/types/src/index.d.cts", diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/explainers/explainer-oracle.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/explainers/explainer-oracle.md new file mode 100644 index 00000000000..b84ca5dd986 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/explainers/explainer-oracle.md @@ -0,0 +1,57 @@ +--- +title: Oracles +description: This guide provides an in-depth understanding of how Oracles work in Noir programming. Learn how to use outside calculations in your programs, constrain oracles, and understand their uses and limitations. +keywords: + - Noir Programming + - Oracles + - JSON-RPC + - Foreign Call Handlers + - Constrained Functions + - Blockchain Programming +sidebar_position: 1 +--- + +If you've seen "The Matrix" you may recall "The Oracle" as Gloria Foster smoking cigarettes and baking cookies. While she appears to "know things", she is actually providing a calculation of a pre-determined future. Noir Oracles are similar, in a way. They don't calculate the future (yet), but they allow you to use outside calculations in your programs. + +![matrix oracle prediction](@site/static/img/memes/matrix_oracle.jpeg) + +A Noir program is usually self-contained. You can pass certain inputs to it, and it will generate a deterministic output for those inputs. But what if you wanted to defer some calculation to an outside process or source? + +Oracles are functions that provide this feature. + +## Use cases + +An example usage for Oracles is proving something on-chain. 
For example, proving that the ETH-USDC quote was below a certain target at a certain block time. Or even making more complex proofs like proving the ownership of an NFT as an anonymous login method.
+
+Another interesting use case is to defer expensive calculations to be made outside of the Noir program, and then constrain the result; similar to the use of [unconstrained functions](../noir/concepts//unconstrained.md).
+
+In short, anything that can be constrained in a Noir program but needs to be fetched from an external source is a great candidate to be used in oracles.
+
+## Constraining oracles
+
+Just like in The Matrix, Oracles are powerful. But with great power comes great responsibility. Just because you're using them in a Noir program doesn't mean they're true. Noir has no superpowers. If you want to prove that Portugal won the Euro Cup 2016, you're still relying on potentially untrusted information.
+
+To give a concrete example, Alice wants to log in to the [NounsDAO](https://nouns.wtf/) forum with her username "noir_nouner" by proving she owns a noun without revealing her Ethereum address. Her Noir program could have an oracle call like this:
+
+```rust
+#[oracle(getNoun)]
+unconstrained fn get_noun(address: Field) -> Field
+```
+
+This oracle could naively resolve with the number of Nouns she possesses. However, it is useless as a trusted source, as the oracle could resolve to anything Alice wants. In order to make this oracle call actually useful, Alice would need to constrain the response from the oracle, by proving her address and the noun count belong to the state tree of the contract.
+
+In short, **Oracles don't prove anything. Your Noir program does.**
+
+:::danger
+
+If you don't constrain the return of your oracle, you could be opening a clear attack vector on your Noir program. Make double-triple sure that the return of an oracle call is constrained!
+
+:::
+
+## How to use Oracles
+
+On the CLI, Nargo resolves oracles by making JSON RPC calls, which means it requires an RPC node to be running.
+
+In JavaScript, NoirJS accepts and resolves arbitrary call handlers (that is, not limited to JSON) as long as they match the expected types the developer defines. Refer to [Foreign Call Handler](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) to learn more about NoirJS's call handling.
+
+If you want to build using oracles, follow through to the [oracle guide](../how_to/how-to-oracles.md) for a simple example of how to do that.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/explainers/explainer-recursion.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/explainers/explainer-recursion.md
new file mode 100644
index 00000000000..18846176ca7
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/explainers/explainer-recursion.md
@@ -0,0 +1,176 @@
+---
+title: Recursive proofs
+description: Explore the concept of recursive proofs in Zero-Knowledge programming. Understand how recursion works in Noir, a language for writing smart contracts on the EVM blockchain. Learn through practical examples like Alice and Bob's guessing game, Charlie's recursive merkle tree, and Daniel's reusable components. Discover how to use recursive proofs to optimize computational resources and improve efficiency.
+ +keywords: + [ + "Recursive Proofs", + "Zero-Knowledge Programming", + "Noir", + "EVM Blockchain", + "Smart Contracts", + "Recursion in Noir", + "Alice and Bob Guessing Game", + "Recursive Merkle Tree", + "Reusable Components", + "Optimizing Computational Resources", + "Improving Efficiency", + "Verification Key", + "Aggregation", + "Recursive zkSNARK schemes", + "PLONK", + "Proving and Verification Keys" + ] +sidebar_position: 1 +pagination_next: how_to/how-to-recursion +--- + +In programming, we tend to think of recursion as something calling itself. A classic example would be the calculation of the factorial of a number: + +```js +function factorial(n) { + if (n === 0 || n === 1) { + return 1; + } else { + return n * factorial(n - 1); + } +} +``` + +In this case, while `n` is not `1`, this function will keep calling itself until it hits the base case, bubbling up the result on the call stack: + +```md + Is `n` 1? <--------- + /\ / + / \ n = n -1 + / \ / + Yes No -------- +``` + +In Zero-Knowledge, recursion has some similarities. + +It is not a Noir function calling itself, but a proof being used as an input to another circuit. In short, you verify one proof *inside* another proof, returning the proof that both proofs are valid. + +This means that, given enough computational resources, you can prove the correctness of any arbitrary number of proofs in a single proof. This could be useful to design state channels (for which a common example would be [Bitcoin's Lightning Network](https://en.wikipedia.org/wiki/Lightning_Network)), to save on gas costs by settling one proof on-chain, or simply to make business logic less dependent on a consensus mechanism. + +## Examples + +Let us look at some of these examples + +### Alice and Bob - Guessing game + +Alice and Bob are friends, and they like guessing games. They want to play a guessing game online, but for that, they need a trusted third-party that knows both of their secrets and finishes the game once someone wins. + +So, they use zero-knowledge proofs. Alice tries to guess Bob's number, and Bob will generate a ZK proof stating whether she succeeded or failed. + +This ZK proof can go on a smart contract, revealing the winner and even giving prizes. However, this means every turn needs to be verified on-chain. This incurs some cost and waiting time that may simply make the game too expensive or time-consuming to be worth it. + +As a solution, Alice proposes the following: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". + +She can then generate a proof that she verified his proof, and so on. + +```md + Did you fail? <-------------------------- + / \ / + / \ n = n -1 + / \ / + Yes No / + | | / + | | / + | You win / + | / + | / +Generate proof of that / + + / + my own guess ---------------- +``` + +### Charlie - Recursive merkle tree + +Charlie is a concerned citizen, and wants to be sure his vote in an election is accounted for. He votes with a ZK proof, but he has no way of knowing that his ZK proof was included in the total vote count! 
+
+If the vote collector puts all of the votes into a [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree), everyone can prove the verification of two proofs within one proof, as such:
+
+```md
+                  abcd
+         __________|___________
+        |                      |
+        ab                     cd
+    ____|____              ____|____
+   |         |            |         |
+ alice      bob        charlie    daniel
+```
+
+Doing this recursively allows us to arrive at a final proof `abcd` which, if true, verifies the correctness of all the votes.
+
+### Daniel - Reusable components
+
+Daniel has a big circuit and a big headache. A part of his circuit is a setup phase that finishes with some assertions that need to be made. But that section alone takes most of the proving time, and is largely independent of the rest of the circuit.
+
+He might find it more efficient to generate a proof for that setup phase separately, and verify that proof recursively in the actual business logic section of his circuit. This will allow for parallelization of both proofs, which results in a considerable speedup.
+
+## What params do I need
+
+As you can see in the [recursion reference](noir/standard_library/recursion.md), a simple recursive proof requires:
+
+- The proof to verify
+- The Verification Key of the circuit that generated the proof
+- A hash of this verification key, as it's needed for some backends
+- The public inputs for the proof
+
+:::info
+
+Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather, an aggregation object is built over the public inputs.
+
+So, taking the example of Alice and Bob and their guessing game:
+
+- Alice makes her guess. Her proof is *not* recursive: it doesn't verify any proof within it! It's just a standard `assert(x != y)` circuit.
+- Bob verifies Alice's proof and makes his own guess. In this circuit, he doesn't exactly *prove* the verification of Alice's proof. Instead, he *aggregates* his proof to Alice's proof. The actual verification is done when the full proof is verified, for example when using `nargo verify` or through the verifier smart contract.
+
+We can imagine recursive proofs as a [relay race](https://en.wikipedia.org/wiki/Relay_race). The first runner doesn't have to receive the baton from anyone else, as he/she already starts with it. But when his/her turn is over, the next runner needs to receive it, run a bit more, and pass it along. Even though every runner could theoretically verify the baton mid-run (why not? 🏃🔍), only at the end of the race does the referee verify that the whole race is valid.
+
+:::
+
+## Some architecture
+
+As with everything in computer science, there's no one-size-fits-all. But there are some patterns that can help with understanding and implementing them. To give three examples:
+
+### Adding some logic to a proof verification
+
+This would be an approach for something like our guessing game, where proofs are sent back and forth and are verified by each opponent. This circuit would be divided into two sections:
+
+- A `recursive verification` section, which would be just the call to `std::verify_proof`, and that would be skipped on the first move (since there's no proof to verify)
+- A `guessing` section, which is basically the logic part where the actual guessing happens
+
+In such a situation, and assuming Alice is first, she would skip the first part and try to guess Bob's number. Bob would then verify her proof on the first section of his run, and try to guess Alice's number on the second part, and so on.
+ +### Aggregating proofs + +In some one-way interaction situations, recursion would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere. + +To give a practical example, a barman wouldn't need to verify a "proof-of-age" on-chain every time he serves alcohol to a customer. Instead, the architecture would comprise two circuits: + +- A `main`, non-recursive circuit with some logic +- A `recursive` circuit meant to verify two proofs in one proof + +The customer's proofs would be intermediate, and made on their phones, and the barman could just verify them locally. He would then aggregate them into a final proof sent on-chain (or elsewhere) at the end of the day. + +### Recursively verifying different circuits + +Nothing prevents you from verifying different circuits in a recursive proof, for example: + +- A `circuit1` circuit +- A `circuit2` circuit +- A `recursive` circuit + +In this example, a regulator could verify that taxes were paid for a specific purchase by aggregating both a `payer` circuit (proving that a purchase was made and taxes were paid), and a `receipt` circuit (proving that the payment was received) + +## How fast is it + +At the time of writing, verifying recursive proofs is surprisingly fast. This is because most of the time is spent on generating the verification key that will be used to generate the next proof. So you are able to cache the verification key and reuse it later. + +Currently, Noir JS packages don't expose the functionality of loading proving and verification keys, but that feature exists in the underlying `bb.js` package. + +## How can I try it + +Learn more about using recursion in Nargo and NoirJS in the [how-to guide](../how_to/how-to-recursion.md) and see a full example in [noir-examples](https://github.com/noir-lang/noir-examples). diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/_category_.json new file mode 100644 index 00000000000..23b560f610b --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/index.md new file mode 100644 index 00000000000..743c4d8d634 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/index.md @@ -0,0 +1,142 @@ +--- +title: Creating a Project +description: + Learn how to create and verify your first Noir program using Nargo, a programming language for + zero-knowledge proofs. +keywords: + [ + Nargo, + Noir, + zero-knowledge proofs, + programming language, + create Noir program, + verify Noir program, + step-by-step guide, + ] +sidebar_position: 1 + +--- + +Now that we have installed Nargo, it is time to make our first hello world program! 
+ +## Create a Project Directory + +Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home +directory to house our Noir programs. + +For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by +running: + +```sh +mkdir ~/projects +cd ~/projects +``` + +## Create Our First Nargo Project + +Now that we are in the projects directory, create a new Nargo project by running: + +```sh +nargo new hello_world +``` + +> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for +> demonstration. +> +> In production, the common practice is to name the project folder as `circuits` for better +> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, +> `test`). + +A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and +_Nargo.toml_ which contain the source code and environmental options of your Noir program +respectively. + +### Intro to Noir Syntax + +Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +The first line of the program specifies the program's inputs: + +```rust +x : Field, y : pub Field +``` + +Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the +keyword `pub` (e.g. `y`). To learn more about private and public values, check the +[Data Types](../../noir/concepts/data_types/index.md) section. + +The next line of the program specifies its body: + +```rust +assert(x != y); +``` + +The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. + +For more Noir syntax, check the [Language Concepts](../../noir/concepts/comments.md) chapter. + +## Build In/Output Files + +Change directory into _hello_world_ and build in/output files for your Noir program by running: + +```sh +cd hello_world +nargo check +``` + +Two additional files would be generated in your project directory: + +_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. + +## Prove Our Noir Program + +Now that the project is set up, we can create a proof of correct execution of our Noir program. + +Fill in input values for execution in the _Prover.toml_ file. For example: + +```toml +x = "1" +y = "2" +``` + +Prove the valid execution of your Noir program: + +```sh +nargo prove +``` + +A new folder _proofs_ would then be generated in your project directory, containing the proof file +`.proof`, where the project name is defined in Nargo.toml. + +The _Verifier.toml_ file would also be updated with the public values computed from program +execution (in this case the value of `y`): + +```toml +y = "0x0000000000000000000000000000000000000000000000000000000000000002" +``` + +> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. + +## Verify Our Noir Program + +Once a proof is generated, we can verify correct execution of our Noir program by verifying the +proof file. + +Verify your proof by running: + +```sh +nargo verify +``` + +The verification will complete in silence if it is successful. If it fails, it will log the +corresponding error instead. + +Congratulations, you have now created and verified a proof for your very first Noir program! + +In the [next section](./project_breakdown.md), we will go into more detail on each step performed. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/project_breakdown.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/project_breakdown.md new file mode 100644 index 00000000000..6160a102c6c --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/hello_noir/project_breakdown.md @@ -0,0 +1,199 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML + files, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] +sidebar_position: 2 +--- + +This section breaks down our hello world program from the previous section. We elaborate on the project +structure and what the `prove` and `verify` commands did. + +## Anatomy of a Nargo Project + +Upon creating a new project with `nargo new` and building the in/output files with `nargo check` +commands, you would get a minimal Nargo project of the following structure: + + - src + - Prover.toml + - Verifier.toml + - Nargo.toml + +The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ +file will be generated within it. + +### Prover.toml + +_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. + +### Verifier.toml + +_Verifier.toml_ contains public in/output values computed when executing the Noir program. + +### Nargo.toml + +_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. + +Example Nargo.toml: + +```toml +[package] +name = "noir_starter" +type = "bin" +authors = ["Alice"] +compiler_version = "0.9.0" +description = "Getting started with Noir" +entry = "circuit/main.nr" +license = "MIT" + +[dependencies] +ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} +``` + +Nargo.toml for a [workspace](../../noir/modules_packages_crates/workspaces.md) will look a bit different. For example: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +#### Package section + +The package section defines a number of fields including: + +- `name` (**required**) - the name of the package +- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether its a binary, library or Aztec contract +- `authors` (optional) - authors of the project +- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follow's [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. 
For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html) +- `description` (optional) +- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`) +- `backend` (optional) +- `license` (optional) + +#### Dependencies section + +This is where you will specify any dependencies for your project. See the [Dependencies page](../../noir/modules_packages_crates/dependencies.md) for more info. + +`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or +verifier contract respectively. + +### main.nr + +The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. + +In our sample program, _main.nr_ looks like this: + +```rust +fn main(x : Field, y : Field) { + assert(x != y); +} +``` + +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the +prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when +verifying the proof. + +The prover supplies the values for `x` and `y` in the _Prover.toml_ file. + +As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is +constrained by the proof of the execution of said program (i.e. if the condition was not met, the +verifier would reject the proof as an invalid proof). + +### Prover.toml + +The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and +public). + +In our hello world program the _Prover.toml_ file looks like this: + +```toml +x = "1" +y = "2" +``` + +When the command `nargo prove` is executed, two processes happen: + +1. Noir creates a proof that `x`, which holds the value of `1`, and `y`, which holds the value of `2`, + is not equal. This inequality constraint is due to the line `assert(x != y)`. + +2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. + +#### Arrays of Structs + +The following code shows how to pass an array of structs to a Noir program to generate a proof. + +```rust +// main.nr +struct Foo { + bar: Field, + baz: Field, +} + +fn main(foos: [Foo; 3]) -> pub Field { + foos[2].bar + foos[2].baz +} +``` + +Prover.toml: + +```toml +[[foos]] # foos[0] +bar = 0 +baz = 0 + +[[foos]] # foos[1] +bar = 0 +baz = 0 + +[[foos]] # foos[2] +bar = 1 +baz = 2 +``` + +#### Custom toml files + +You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. + +This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: + +```bash +nargo prove +``` + +This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: + +```bash +nargo prove -p OtherProver +``` + +## Verifying a Proof + +When the command `nargo verify` is executed, two processes happen: + +1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) + +2. 
If that file is found, the proof's validity is checked + +> **Note:** The validity of the proof is linked to the current Noir program; if the program is +> changed and the verifier verifies the proof, it will fail because the proof is not valid for the +> _modified_ Noir program. + +In production, the prover and the verifier are usually two separate entities. A prover would +retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the +verifier. The verifier would then retrieve the public inputs, usually from external sources, and +verify the validity of the proof against it. + +Take a private asset transfer as an example: + +A person using a browser as the prover would retrieve private inputs locally (e.g. the user's private key) and +public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof +and submit it to the verifier smart contract. + +The verifier contract would then draw the user's encrypted balance directly from the blockchain and +verify the proof submitted against it. If the verification passes, additional functions in the +verifier contract could trigger (e.g. approve the asset transfer). + +Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/_category_.json new file mode 100644 index 00000000000..0c02fb5d4d7 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 0, + "label": "Install Nargo", + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/index.md new file mode 100644 index 00000000000..4ef86aa5914 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/index.md @@ -0,0 +1,48 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs. This page is a quick guide on how to install Nargo through the most common and easy method, noirup +keywords: [ + Nargo + Noir + Rust + Cargo + Noirup + Installation + Terminal Commands + Version Check + Nightlies + Specific Versions + Branches + Noirup Repository +] +pagination_next: getting_started/hello_noir/index +--- + +`nargo` is the one-stop-shop for almost everything related with Noir. The name comes from our love for Rust and its package manager `cargo`. + +With `nargo`, you can start new projects, compile, execute, prove, verify, test, generate solidity contracts, and do pretty much all that is available in Noir. + +Similarly to `rustup`, we also maintain an easy installation method that covers most machines: `noirup`. + +## Installing Noirup + +Open a terminal on your machine, and write: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done. That's it. You should have the latest version working. You can check with `nargo --version`. + +You can also install nightlies, specific versions +or branches. Check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. 
+ +Now we're ready to start working on [our first Noir program!](../hello_noir/index.md) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/other_install_methods.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/other_install_methods.md new file mode 100644 index 00000000000..3634723562b --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/installation/other_install_methods.md @@ -0,0 +1,102 @@ +--- +title: Alternative Installations +description: There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. This guide explains how to specify which version to install when using noirup, and using WSL for windows. +keywords: [ + Installation + Nargo + Noirup + Binaries + Compiling from Source + WSL for Windows + macOS + Linux + Nix + Direnv + Uninstalling Nargo + ] +sidebar_position: 1 +--- + +## Encouraged Installation Method: Noirup + +Noirup is the endorsed method for installing Nargo, streamlining the process of fetching binaries or compiling from source. It supports a range of options to cater to your specific needs, from nightly builds and specific versions to compiling from various sources. + +### Installing Noirup + +First, ensure you have `noirup` installed: + +```sh +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +### Fetching Binaries + +With `noirup`, you can easily switch between different Nargo versions, including nightly builds: + +- **Nightly Version**: Install the latest nightly build. + + ```sh + noirup --version nightly + ``` + +- **Specific Version**: Install a specific version of Nargo. + ```sh + noirup --version + ``` + +### Compiling from Source + +`noirup` also enables compiling Nargo from various sources: + +- **From a Specific Branch**: Install from the latest commit on a branch. + + ```sh + noirup --branch + ``` + +- **From a Fork**: Install from the main branch of a fork. + + ```sh + noirup --repo + ``` + +- **From a Specific Branch in a Fork**: Install from a specific branch in a fork. + + ```sh + noirup --repo --branch + ``` + +- **From a Specific Pull Request**: Install from a specific PR. + + ```sh + noirup --pr + ``` + +- **From a Specific Commit**: Install from a specific commit. + + ```sh + noirup -C + ``` + +- **From Local Source**: Compile and install from a local directory. + ```sh + noirup --path ./path/to/local/source + ``` + +## Installation on Windows + +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). + +Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. + +step 2: Follow the [Noirup instructions](#encouraged-installation-method-noirup). + +## Uninstalling Nargo + +If you installed Nargo with `noirup`, you can uninstall Nargo by removing the files in `~/.nargo`, `~/nargo`, and `~/noir_cache`. This ensures that all installed binaries, configurations, and cache related to Nargo are fully removed from your system. 
+ +```bash +rm -r ~/.nargo +rm -r ~/nargo +rm -r ~/noir_cache +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/tooling/noir_codegen.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/tooling/noir_codegen.md new file mode 100644 index 00000000000..d65151da0ab --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/getting_started/tooling/noir_codegen.md @@ -0,0 +1,113 @@ +--- +title: Noir Codegen for TypeScript +description: Learn how to use Noir codegen to generate TypeScript bindings +keywords: [Nargo, Noir, compile, TypeScript] +sidebar_position: 2 +--- + +When using TypeScript, it is extra work to interpret Noir program outputs in a type-safe way. Third party libraries may exist for popular Noir programs, but they are either hard to find or unmaintained. + +Now you can generate TypeScript bindings for your Noir programs in two steps: +1. Exporting Noir functions using `nargo export` +2. Using the TypeScript module `noir_codegen` to generate TypeScript binding + +**Note:** you can only export functions from a Noir *library* (not binary or contract program types). + +## Installation + +### Your TypeScript project + +If you don't already have a TypeScript project you can add the module with `yarn` (or `npm`), then initialize it: + +```bash +yarn add typescript -D +npx tsc --init +``` + +### Add TypeScript module - `noir_codegen` + +The following command will add the module to your project's devDependencies: + +```bash +yarn add @noir-lang/noir_codegen -D +``` + +### Nargo library +Make sure you have Nargo, v0.25.0 or greater, installed. If you don't, follow the [installation guide](../installation/index.md). + +If you're in a new project, make a `circuits` folder and create a new Noir library: + +```bash +mkdir circuits && cd circuits +nargo new --lib myNoirLib +``` + +## Usage + +### Export ABI of specified functions + +First go to the `.nr` files in your Noir library, and add the `#[export]` macro to each function that you want to use in TypeScript. + +```rust +#[export] +fn your_function(... +``` + +From your Noir library (where `Nargo.toml` is), run the following command: + +```bash +nargo export +``` + +You will now have an `export` directory with a .json file per exported function. + +You can also specify the directory of Noir programs using `--program-dir`, for example: + +```bash +nargo export --program-dir=./circuits/myNoirLib +``` + +### Generate TypeScript bindings from exported functions + +To use the `noir-codegen` package we added to the TypeScript project: + +```bash +yarn noir-codegen ./export/your_function.json +``` + +This creates an `exports` directory with an `index.ts` file containing all exported functions. + +**Note:** adding `--out-dir` allows you to specify an output dir for your TypeScript bindings to go. 
Eg: + +```bash +yarn noir-codegen ./export/*.json --out-dir ./path/to/output/dir +``` + +## Example .nr function to .ts output + +Consider a Noir library with this function: + +```rust +#[export] +fn not_equal(x: Field, y: Field) -> bool { + x != y +} +``` + +After the export and codegen steps, you should have an `index.ts` like: + +```typescript +export type Field = string; + + +export const is_equal_circuit: CompiledCircuit = {"abi":{"parameters":[{"name":"x","type":{"kind":"field"},"visibility":"private"},{"name":"y","type":{"kind":"field"},"visibility":"private"}],"param_witnesses":{"x":[{"start":0,"end":1}],"y":[{"start":1,"end":2}]},"return_type":{"abi_type":{"kind":"boolean"},"visibility":"private"},"return_witnesses":[4]},"bytecode":"H4sIAAAAAAAA/7WUMQ7DIAxFQ0Krrr2JjSGYLVcpKrn/CaqqDQN12WK+hPBgmWd/wEyHbF1SS923uhOs3pfoChI+wKXMAXzIKyNj4PB0TFTYc0w5RUjoqeAeEu1wqK0F54RGkWvW44LPzExnlkbMEs4JNZmN8PxS42uHv82T8a3Jeyn2Ks+VLPcO558HmyLMCDOXAXXtpPt4R/Rt9T36ss6dS9HGPx/eG17nGegKBQAA"}; + +export async function is_equal(x: Field, y: Field, foreignCallHandler?: ForeignCallHandler): Promise { + const program = new Noir(is_equal_circuit); + const args: InputMap = { x, y }; + const { returnValue } = await program.execute(args, foreignCallHandler); + return returnValue as boolean; +} +``` + +Now the `is_equal()` function and relevant types are readily available for use in TypeScript. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/_category_.json new file mode 100644 index 00000000000..23b560f610b --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/_category_.json new file mode 100644 index 00000000000..cc2cbb1c253 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Debugging", + "position": 5, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/debugging_with_the_repl.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/debugging_with_the_repl.md new file mode 100644 index 00000000000..09e5bae68ad --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/debugging_with_the_repl.md @@ -0,0 +1,164 @@ +--- +title: Using the REPL Debugger +description: + Step by step guide on how to debug your Noir circuits with the REPL Debugger. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + REPL, + ] +sidebar_position: 1 +--- + +#### Pre-requisites + +In order to use the REPL debugger, first you need to install recent enough versions of Nargo and vscode-noir. + +## Debugging a simple circuit + +Let's debug a simple circuit: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +To start the REPL debugger, using a terminal, go to a Noir circuit's home directory. Then: + +`$ nargo debug` + +You should be seeing this in your terminal: + +``` +[main] Starting debugger +At ~/noir-examples/recursion/circuits/main/src/main.nr:1:9 + 1 -> fn main(x : Field, y : pub Field) { + 2 assert(x != y); + 3 } +> +``` + +The debugger displays the current Noir code location, and it is now waiting for us to drive it. 
+ +Let's first take a look at the available commands. For that we'll use the `help` command. + +``` +> help +Available commands: + + opcodes display ACIR opcodes + into step into to the next opcode + next step until a new source location is reached + out step until a new source location is reached + and the current stack frame is finished + break LOCATION:OpcodeLocation add a breakpoint at an opcode location + over step until a new source location is reached + without diving into function calls + restart restart the debugging session + delete LOCATION:OpcodeLocation delete breakpoint at an opcode location + witness show witness map + witness index:u32 display a single witness from the witness map + witness index:u32 value:String update a witness with the given value + memset index:usize value:String update a memory cell with the given + value + continue continue execution until the end of the + program + vars show variable values available at this point + in execution + stacktrace display the current stack trace + memory show memory (valid when executing unconstrained code) + step step to the next ACIR opcode + +Other commands: + + help Show this help message + quit Quit repl + +``` + +Some commands operate only for unconstrained functions, such as `memory` and `memset`. If you try to use them while execution is paused at an ACIR opcode, the debugger will simply inform you that you are not executing unconstrained code: + +``` +> memory +Unconstrained VM memory not available +> +``` + +Before continuing, we can take a look at the initial witness map: + +``` +> witness +_0 = 1 +_1 = 2 +> +``` + +Cool, since `x==1`, `y==2`, and we want to check that `x != y`, our circuit should succeed. At this point we could intervene and use the witness setter command to change one of the witnesses. Let's set `y=3`, then back to 2, so we don't affect the expected result: + +``` +> witness +_0 = 1 +_1 = 2 +> witness 1 3 +_1 = 3 +> witness +_0 = 1 +_1 = 3 +> witness 1 2 +_1 = 2 +> witness +_0 = 1 +_1 = 2 +> +``` + +Now we can inspect the current state of local variables. For that we use the `vars` command. + +``` +> vars +> +``` + +We currently have no vars in context, since we are at the entry point of the program. Let's use `next` to execute until the next point in the program. + +``` +> vars +> next +At ~/noir-examples/recursion/circuits/main/src/main.nr:1:20 + 1 -> fn main(x : Field, y : pub Field) { + 2 assert(x != y); + 3 } +> vars +x:Field = 0x01 +``` + +As a result of stepping, the variable `x`, whose initial value comes from the witness map, is now in context and returned by `vars`. + +``` +> next + 1 fn main(x : Field, y : pub Field) { + 2 -> assert(x != y); + 3 } +> vars +y:Field = 0x02 +x:Field = 0x01 +``` + +Stepping again we can finally see both variables and their values. And now we can see that the next assertion should succeed. + +Let's continue to the end: + +``` +> continue +(Continuing execution...) +Finished execution +> q +[main] Circuit witness successfully solved +``` + +Upon quitting the debugger after a solved circuit, the resulting circuit witness gets saved, equivalent to what would happen if we had run the same circuit with `nargo execute`. + +We just went through the basics of debugging using Noir REPL debugger. For a comprehensive reference, check out [the reference page](../../reference/debugger/debugger_repl.md). 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/debugging_with_vs_code.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/debugging_with_vs_code.md
new file mode 100644
index 00000000000..a5858c1a5eb
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/debugger/debugging_with_vs_code.md
@@ -0,0 +1,68 @@
+---
+title: Using the VS Code Debugger
+description:
+  Step by step guide on how to debug your Noir circuits with the VS Code Debugger configuration and features.
+keywords:
+  [
+    Nargo,
+    Noir CLI,
+    Noir Debugger,
+    VS Code,
+    IDE,
+  ]
+sidebar_position: 0
+---
+
+This guide will show you how to use VS Code with the vscode-noir extension to debug a Noir project.
+
+#### Pre-requisites
+
+- Nargo
+- vscode-noir
+- A Noir project with a `Nargo.toml`, `Prover.toml` and at least one Noir file (`.nr`) containing an entry point function (typically `main`).
+
+## Running the debugger
+
+The easiest way to start debugging is to open the file you want to debug, and press `F5`. This will cause the debugger to launch, using your `Prover.toml` file as input.
+
+You should see something like this:
+
+![Debugger launched](@site/static/img/debugger/1-started.png)
+
+Let's inspect the state of the program. For that, we open VS Code's _Debug pane_. Look for this icon:
+
+![Debug pane icon](@site/static/img/debugger/2-icon.png)
+
+You will now see two categories of variables: Locals and Witness Map.
+
+![Debug pane expanded](@site/static/img/debugger/3-debug-pane.png)
+
+1. **Locals**: variables of your program. At this point in execution this section is empty, but as we step through the code it will get populated by `x`, `result`, `digest`, etc.
+
+2. **Witness map**: these are initially populated from your project's `Prover.toml` file. In this example, they will be used to populate `x` and `result` at the beginning of the `main` function.
+
+Most of the time you will probably be focusing on locals, as they represent the high level state of your program.
+
+You might be interested in inspecting the witness map in case you are trying to solve a really low level issue in the compiler or runtime itself, so this concerns mostly advanced or niche users.
+
+Let's step through the program by using the debugger buttons or their corresponding keyboard shortcuts.
+
+![Debugger buttons](@site/static/img/debugger/4-debugger-buttons.png)
+
+Now we can see in the variables pane that there are values for `digest`, `result` and `x`.
+
+![Inspecting locals](@site/static/img/debugger/5-assert.png)
+
+We can also inspect the values of variables by directly hovering over them in the code.
+
+![Hover locals](@site/static/img/debugger/6-hover.png)
+
+Let's set a breakpoint at the `keccak256` function, so we can continue execution up to the point when it's first invoked without having to go one step at a time.
+
+We just need to click to the right of the line number 18. Once the breakpoint appears, we can click the `continue` button or use its corresponding keyboard shortcut (`F5` by default).
+
+![Breakpoint](@site/static/img/debugger/7-break.png)
+
+Now we are debugging the `keccak256` function; notice the _Call Stack pane_ at the lower right. This lets us inspect the current call stack of our process.
+
+That covers most of the current debugger functionalities. Check out [the reference](../../reference/debugger/debugger_vscode.md) for more details on how to configure the debugger.
\ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-oracles.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-oracles.md new file mode 100644 index 00000000000..8cf8035a5c4 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-oracles.md @@ -0,0 +1,276 @@ +--- +title: How to use Oracles +description: Learn how to use oracles in your Noir program with examples in both Nargo and NoirJS. This guide also covers writing a JSON RPC server and providing custom foreign call handlers for NoirJS. +keywords: + - Noir Programming + - Oracles + - Nargo + - NoirJS + - JSON RPC Server + - Foreign Call Handlers +sidebar_position: 1 +--- + +This guide shows you how to use oracles in your Noir program. For the sake of clarity, it assumes that: + +- You have read the [explainer on Oracles](../explainers/explainer-oracle.md) and are comfortable with the concept. +- You have a Noir program to add oracles to. You can create one using the [vite-hardhat starter](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat) as a boilerplate. +- You understand the concept of a JSON-RPC server. Visit the [JSON-RPC website](https://www.jsonrpc.org/) if you need a refresher. +- You are comfortable with server-side JavaScript (e.g. Node.js, managing packages, etc.). + +For reference, you can find the snippets used in this tutorial on the [Aztec DevRel Repository](https://github.com/AztecProtocol/dev-rel/tree/main/code-snippets/how-to-oracles). + +## Rundown + +This guide has 3 major steps: + +1. How to modify our Noir program to make use of oracle calls as unconstrained functions +2. How to write a JSON RPC Server to resolve these oracle calls with Nargo +3. How to use them in Nargo and how to provide a custom resolver in NoirJS + +## Step 1 - Modify your Noir program + +An oracle is defined in a Noir program by defining two methods: + +- An unconstrained method - This tells the compiler that it is executing an [unconstrained functions](../noir/concepts//unconstrained.md). +- A decorated oracle method - This tells the compiler that this method is an RPC call. + +An example of an oracle that returns a `Field` would be: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt(number: Field) -> Field { } + +unconstrained fn get_sqrt(number: Field) -> Field { + sqrt(number) +} +``` + +In this example, we're wrapping our oracle function in a unconstrained method, and decorating it with `oracle(getSqrt)`. We can then call the unconstrained function as we would call any other function: + +```rust +fn main(input: Field) { + let sqrt = get_sqrt(input); +} +``` + +In the next section, we will make this `getSqrt` (defined on the `sqrt` decorator) be a method of the RPC server Noir will use. + +:::danger + +As explained in the [Oracle Explainer](../explainers/explainer-oracle.md), this `main` function is unsafe unless you constrain its return value. For example: + +```rust +fn main(input: Field) { + let sqrt = get_sqrt(input); + assert(sqrt.pow_32(2) as u64 == input as u64); // <---- constrain the return of an oracle! +} +``` + +::: + +:::info + +Currently, oracles only work with single params or array params. For example: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt([Field; 2]) -> [Field; 2] { } +``` + +::: + +## Step 2 - Write an RPC server + +Brillig will call *one* RPC server. Most likely you will have to write your own, and you can do it in whatever language you prefer. In this guide, we will do it in Javascript. 
+ +Let's use the above example of an oracle that consumes an array with two `Field` and returns their square roots: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt(input: [Field; 2]) -> [Field; 2] { } + +unconstrained fn get_sqrt(input: [Field; 2]) -> [Field; 2] { + sqrt(input) +} + +fn main(input: [Field; 2]) { + let sqrt = get_sqrt(input); + assert(sqrt[0].pow_32(2) as u64 == input[0] as u64); + assert(sqrt[1].pow_32(2) as u64 == input[1] as u64); +} +``` + +:::info + +Why square root? + +In general, computing square roots is computationally more expensive than multiplications, which takes a toll when speaking about ZK applications. In this case, instead of calculating the square root in Noir, we are using our oracle to offload that computation to be made in plain. In our circuit we can simply multiply the two values. + +::: + +Now, we should write the correspondent RPC server, starting with the [default JSON-RPC 2.0 boilerplate](https://www.npmjs.com/package/json-rpc-2.0#example): + +```js +import { JSONRPCServer } from "json-rpc-2.0"; +import express from "express"; +import bodyParser from "body-parser"; + +const app = express(); +app.use(bodyParser.json()); + +const server = new JSONRPCServer(); +app.post("/", (req, res) => { + const jsonRPCRequest = req.body; + server.receive(jsonRPCRequest).then((jsonRPCResponse) => { + if (jsonRPCResponse) { + res.json(jsonRPCResponse); + } else { + res.sendStatus(204); + } + }); +}); + +app.listen(5555); +``` + +Now, we will add our `getSqrt` method, as expected by the `#[oracle(getSqrt)]` decorator in our Noir code. It maps through the params array and returns their square roots: + +```js +server.addMethod("getSqrt", async (params) => { + const values = params[0].Array.map((field) => { + return `${Math.sqrt(parseInt(field, 16))}`; + }); + return { values: [{ Array: values }] }; +}); +``` + +:::tip + +Brillig expects an object with an array of values. Each value is an object declaring to be `Single` or `Array` and returning a field element *as a string*. For example: + +```json +{ "values": [{ "Array": ["1", "2"] }]} +{ "values": [{ "Single": "1" }]} +{ "values": [{ "Single": "1" }, { "Array": ["1", "2"] }]} +``` + +If you're using Typescript, the following types may be helpful in understanding the expected return value and making sure they're easy to follow: + +```js +interface SingleForeignCallParam { + Single: string, +} + +interface ArrayForeignCallParam { + Array: string[], +} + +type ForeignCallParam = SingleForeignCallParam | ArrayForeignCallParam; + +interface ForeignCallResult { + values: ForeignCallParam[], +} +``` + +::: + +## Step 3 - Usage with Nargo + +Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test`, `nargo execute` and `nargo prove` commands by passing a value to `--oracle-resolver`. For example: + +```bash +nargo test --oracle-resolver http://localhost:5555 +``` + +This tells `nargo` to use your RPC Server URL whenever it finds an oracle decorator. + +## Step 4 - Usage with NoirJS + +In a JS environment, an RPC server is not strictly necessary, as you may want to resolve your oracles without needing any JSON call at all. NoirJS simply expects that you pass a callback function when you generate proofs, and that callback function can be anything. + +For example, if your Noir program expects the host machine to provide CPU pseudo-randomness, you could simply pass it as the `foreignCallHandler`. 
You don't strictly need to create an RPC server to serve pseudo-randomness, as you may as well get it directly in your app:
+
+```js
+const foreignCallHandler = (name, inputs) => crypto.randomBytes(16) // etc
+
+await noir.generateProof(inputs, foreignCallHandler)
+```
+
+As one can see, in NoirJS, the [`foreignCallHandler`](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) function simply means "a callback function that returns a value of type [`ForeignCallOutput`](../reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md)". It doesn't have to be an RPC call like in the case of Nargo.
+
+:::tip
+
+Does this mean you don't have to write an RPC server like in [Step #2](#step-2---write-an-rpc-server)?
+
+You don't technically have to, but then how would you run `nargo test` or `nargo prove`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server.
+
+:::
+
+In this case, let's make `foreignCallHandler` call the JSON RPC Server we created in [Step #2](#step-2---write-an-rpc-server), by making it a JSON RPC Client.
+
+For example, using the same `getSqrt` program in [Step #1](#step-1---modify-your-noir-program) (comments in the code):
+
+```js
+import { JSONRPCClient } from "json-rpc-2.0";
+
+// declaring the JSONRPCClient
+const client = new JSONRPCClient((jsonRPCRequest) => {
+  // hitting the same JSON RPC Server we coded above
+  return fetch("http://localhost:5555", {
+    method: "POST",
+    headers: {
+      "content-type": "application/json",
+    },
+    body: JSON.stringify(jsonRPCRequest),
+  }).then((response) => {
+    if (response.status === 200) {
+      return response
+        .json()
+        .then((jsonRPCResponse) => client.receive(jsonRPCResponse));
+    } else if (jsonRPCRequest.id !== undefined) {
+      return Promise.reject(new Error(response.statusText));
+    }
+  });
+});
+
+// declaring a function that takes the name of the foreign call (getSqrt) and the inputs
+const foreignCallHandler = async (name, input) => {
+  // notice that the "inputs" parameter contains *all* the inputs
+  // in this case we want to make the RPC request with the circuit's first parameter, which would be input[0]
+  const oracleReturn = await client.request(name, [
+    { Array: input[0].map((i) => i.toString("hex")) },
+  ]);
+  return [oracleReturn.values[0].Array];
+};
+
+// the rest of your NoirJS code
+const input = { input: [4, 16] };
+const { witness } = await noir.execute(input, foreignCallHandler);
+```
+
+:::tip
+
+If you're in a NoirJS environment running your RPC server together with a frontend app, you'll probably hit a familiar problem in full-stack development: requests being blocked by [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) policy. For development only, you can simply install and use the [`cors` npm package](https://www.npmjs.com/package/cors) to get around the problem:
+
+```bash
+yarn add cors
+```
+
+and use it as a middleware:
+
+```js
+import cors from "cors";
+
+const app = express();
+app.use(cors())
+```
+
+:::
+
+## Conclusion
+
+Hopefully by the end of this guide, you should be able to:
+
+- Write your own logic around Oracles and write a JSON RPC server to make them work with your Nargo commands.
+- Provide custom foreign call handlers for NoirJS.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-recursion.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-recursion.md
new file mode 100644
index 00000000000..4c45bb87ae2
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-recursion.md
@@ -0,0 +1,179 @@
+---
+title: How to use recursion on NoirJS
+description: Learn how to implement recursion with NoirJS, a powerful tool for creating smart contracts on the EVM blockchain. This guide assumes familiarity with NoirJS, solidity verifiers, and the Barretenberg proving backend. Discover how to generate both final and intermediate proofs using `noir_js` and `backend_barretenberg`.
+keywords:
+  [
+    "NoirJS",
+    "EVM blockchain",
+    "smart contracts",
+    "recursion",
+    "solidity verifiers",
+    "Barretenberg backend",
+    "noir_js",
+    "backend_barretenberg",
+    "intermediate proofs",
+    "final proofs",
+    "nargo compile",
+    "json import",
+    "recursive circuit",
+    "recursive app"
+  ]
+sidebar_position: 1
+---
+
+This guide shows you how to use recursive proofs in your NoirJS app. For the sake of clarity, it is assumed that:
+
+- You already have a NoirJS app. If you don't, please visit the [NoirJS tutorial](../tutorials/noirjs_app.md) and the [reference](../reference/NoirJS/noir_js/index.md).
+- You are familiar with what recursive proofs are and have read the [recursion explainer](../explainers/explainer-recursion.md)
+- You already built a recursive circuit following [the reference](../noir/standard_library/recursion.md), and understand how it works.
+
+It is also assumed that you're not using `noir_wasm` for compilation, and instead you've used [`nargo compile`](../reference/nargo_commands.md) to generate the `json` you're now importing into your project. However, the guide should work just the same if you're using `noir_wasm`.
+
+:::info
+
+As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. This means that it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit.
+
+While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. This means that these proofs need to be created by using the backend directly.
+
+In short:
+
+- `noir_js` generates *only* final proofs
+- `backend_barretenberg` generates both types of proofs
+
+:::
+
+In a standard recursive app, you're also dealing with at least two circuits. For the purpose of this guide, we will assume the following:
+
+- `main`: a circuit of type `assert(x != y)`, where `main` is marked with a `#[recursive]` attribute. This attribute states that the backend should generate proofs that are friendly for verification within another circuit.
+- `recursive`: a circuit that verifies `main`
+
+For a full example of how recursive proofs work, please refer to the [noir-examples](https://github.com/noir-lang/noir-examples) repository. We will *not* be using it as a reference for this guide.
+
+## Step 1: Setup
+
+In a common NoirJS app, you need to instantiate a backend with something like `const backend = new Backend(circuit)`. Then you feed it to the `noir_js` interface.
+
+For recursion, the flow is different: `noir_js` is only needed to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object.
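+
+As a minimal sketch (assuming the `@noir-lang/noir_js` and `@noir-lang/backend_barretenberg` packages, with a hypothetical path to your compiled circuit), the setup could look like this:
+
+```js
+import { Noir } from "@noir-lang/noir_js";
+import { BarretenbergBackend } from "@noir-lang/backend_barretenberg";
+import circuit from "./target/main.json"; // hypothetical path to your compiled circuit
+
+// the backend object handles proving, verification and proof artifacts...
+const backend = new BarretenbergBackend(circuit);
+
+// ...while noir_js is only used to execute the circuit and get its witness
+const noir = new Noir(circuit, backend);
+```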
+
+It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency:
+
+```js
+const backend = new Backend(circuit, { threads: 8 })
+```
+
+:::tip
+You can use the [`os.cpus()`](https://nodejs.org/api/os.html#oscpus) object in Node.js or [`navigator.hardwareConcurrency`](https://developer.mozilla.org/en-US/docs/Web/API/Navigator/hardwareConcurrency) in the browser to make the most out of those glorious CPU cores
+:::
+
+## Step 2: Generating the witness and the proof for `main`
+
+After instantiating the backend, you should also instantiate `noir_js`. We will use it to execute the circuit and get the witness.
+
+```js
+const noir = new Noir(circuit, backend)
+const { witness } = await noir.execute(input)
+```
+
+With this witness, you are now able to generate the intermediate proof for the main circuit:
+
+```js
+const { proof, publicInputs } = await backend.generateProof(witness)
+```
+
+:::warning
+
+Always keep in mind what is actually happening in your development process, otherwise you'll quickly become confused about which circuit you are actually running and why!
+
+In this case, you can imagine that Alice (running the `main` circuit) is proving something to Bob (running the `recursive` circuit), and Bob is verifying her proof within his proof.
+
+With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*, so it must be Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain.
+
+:::
+
+## Step 3 - Verification and proof artifacts
+
+Optionally, you can verify the intermediate proof:
+
+```js
+const verified = await backend.verifyProof({ proof, publicInputs })
+```
+
+This can be useful to make sure our intermediate proof was correctly generated. But the real goal is to do it within another circuit. For that, we need to generate recursive proof artifacts that will be passed to the circuit that is verifying the proof we just generated. Instead of passing the proof and verification key as byte arrays, we pass them as fields, which makes them cheaper to verify in a circuit:
+
+```js
+const { proofAsFields, vkAsFields, vkHash } = await backend.generateRecursiveProofArtifacts({ publicInputs, proof }, publicInputsCount)
+```
+
+This call takes the public inputs and the proof, but also the public inputs count. While this is easily retrievable by simply counting the `publicInputs` length, the backend interface doesn't currently abstract it away.
+
+:::info
+
+The `proofAsFields` has a constant size `[Field; 93]` and verification keys in Barretenberg are always `[Field; 114]`.
+
+:::
+
+:::warning
+
+One common mistake is to forget *who* makes this call.
+
+In a situation where Alice is generating the `main` proof, if she generates the proof artifacts and sends them to Bob, who gladly takes them as true, this would mean Alice could prove anything!
+
+Instead, Bob needs to make sure *he* extracts the proof artifacts, using his own instance of the `main` circuit backend. This way, Alice has to provide a valid proof for the correct `main` circuit.
+
+:::
+
+## Step 4 - Recursive proof generation
+
+With the artifacts, generating a recursive proof is no different from a normal proof.
+You simply use the `backend` (with the recursive circuit) to generate it:
+
+```js
+const recursiveInputs = {
+  verification_key: vkAsFields, // array of length 114
+  proof: proofAsFields, // array of length 93 + size of public inputs
+  publicInputs: [mainInput.y], // using the example above, where `y` is the only public input
+  key_hash: vkHash,
+}
+
+const { witness, returnValue } = await noir.execute(recursiveInputs) // we're executing the recursive circuit now!
+const { proof, publicInputs } = await backend.generateProof(witness)
+const verified = await backend.verifyProof({ proof, publicInputs })
+```
+
+You can obviously chain this proof into another proof. In fact, if you're using recursive proofs, you're probably interested in using them this way!
+
+:::tip
+
+Managing circuits and "who does what" can be confusing. To make sure your naming is consistent, you can keep them in an object. For example:
+
+```js
+const circuits = {
+  main: mainJSON,
+  recursive: recursiveJSON
+}
+const backends = {
+  main: new BarretenbergBackend(circuits.main),
+  recursive: new BarretenbergBackend(circuits.recursive)
+}
+const noir_programs = {
+  main: new Noir(circuits.main, backends.main),
+  recursive: new Noir(circuits.recursive, backends.recursive)
+}
+```
+
+This allows you to neatly call exactly the method you want without conflicting names:
+
+```js
+// Alice runs this 👇
+const { witness: mainWitness } = await noir_programs.main.execute(input)
+const proof = await backends.main.generateProof(mainWitness)
+
+// Bob runs this 👇
+const verified = await backends.main.verifyProof(proof)
+const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateRecursiveProofArtifacts(
+  proof,
+  numPublicInputs,
+);
+const recursiveProof = await noir_programs.recursive.generateProof(recursiveInputs)
+```
+
+:::
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-solidity-verifier.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-solidity-verifier.md
new file mode 100644
index 00000000000..e3c7c1065da
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/how-to-solidity-verifier.md
@@ -0,0 +1,231 @@
+---
+title: Generate a Solidity Verifier
+description:
+  Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier
+  contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart
+  contract. Read more to find out
+keywords:
+  [
+    solidity verifier,
+    smart contract,
+    blockchain,
+    compiler,
+    plonk_vk.sol,
+    EVM blockchain,
+    verifying Noir programs,
+    proving backend,
+    Barretenberg,
+  ]
+sidebar_position: 0
+pagination_next: tutorials/noirjs_app
+---
+
+Noir has the ability to generate a verifier contract in Solidity, which can be deployed on many EVM-compatible blockchains such as Ethereum.
+
+This allows for a powerful feature set, as one can make use of the conciseness and the privacy provided by Noir in an immutable ledger. Applications can range from simple P2P guessing games to complex private DeFi interactions.
+
+This guide shows you how to generate a Solidity Verifier and deploy it on the [Remix IDE](https://remix.ethereum.org/). It is assumed that:
+
+- You are comfortable with the Solidity programming language and understand how contracts are deployed on the Ethereum network
+- You have Noir installed and you have a Noir program. If you don't, [get started](../getting_started/installation/index.md) with Nargo and the example Hello Noir circuit
+- You are comfortable navigating RemixIDE. If you aren't or you need a refresher, you can find some video tutorials [here](https://www.youtube.com/channel/UCjTUPyFEr2xDGN6Cg8nKDaA) that could help you.
+
+## Rundown
+
+Generating a Solidity Verifier contract is actually a one-command process. However, compiling it and deploying it can have some caveats. Here's the rundown of this guide:
+
+1. How to generate a Solidity smart contract
+2. How to compile the smart contract in the RemixIDE
+3. How to deploy it to a testnet
+
+## Step 1 - Generate a contract
+
+This is by far the most straightforward step. Just run:
+
+```sh
+nargo codegen-verifier
+```
+
+A new `contract` folder will then be generated in your project directory, containing the Solidity
+file `plonk_vk.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract.
+
+:::info
+
+It is possible to generate verifier contracts of Noir programs for other smart contract platforms as long as the proving backend supplies an implementation.
+
+Barretenberg, the default proving backend for Nargo, supports generation of verifier contracts; for the time being, these are only in Solidity.
+:::
+
+## Step 2 - Compiling
+
+We will mostly skip the details of RemixIDE, as the UI can change from version to version. For now, we can just open
+Remix and create a blank workspace.
+
+![Create Workspace](@site/static/img/how-tos/solidity_verifier_1.png)
+
+We will create a new file to contain the contract Nargo generated, and copy-paste its content.
+
+:::warning
+
+You'll likely see a warning advising you to not trust pasted code. While it is an important warning, it is irrelevant in the context of this guide and can be ignored. We will not be deploying anywhere near a mainnet.
+
+:::
+
+To compile the verifier, we can navigate to the compilation tab:
+
+![Compilation Tab](@site/static/img/how-tos/solidity_verifier_2.png)
+
+Remix should automatically match a suitable compiler version. However, hitting the "Compile" button will most likely generate a "Stack too deep" error:
+
+![Stack too deep](@site/static/img/how-tos/solidity_verifier_3.png)
+
+This is due to the `verify` function needing to put many variables on the stack; enabling the optimizer resolves the issue. To do this, let's open the "Advanced Configurations" tab and enable optimization. The default 200 runs will suffice.
+
+:::info
+
+This time we will see a warning about an unused function parameter. This is expected, as the `verify` function doesn't use the `_proof` parameter inside a Solidity block; it is loaded from calldata and used in inline assembly.
+
+:::
+
+![Compilation success](@site/static/img/how-tos/solidity_verifier_4.png)
+
+## Step 3 - Deploying
+
+At this point we should have a compiled contract ready to deploy. If we navigate to the deploy section in Remix, we will see many different environments we can deploy to. The steps to deploy on each environment would be out of scope for this guide, so we will just use the default Remix VM.
+
+Looking closely, we will notice that our "Solidity Verifier" is actually three contracts working together:
+
+- An `UltraVerificationKey` library which simply stores the verification key for our circuit.
+- An abstract contract `BaseUltraVerifier` containing most of the verifying logic.
+- A main `UltraVerifier` contract that inherits from the Base and uses the Key contract.
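+
+As a rough sketch (illustrative only; the actual generated file is far longer and implements `verify` inside the base contract using inline assembly), the layout looks like this:
+
+```solidity
+// Sketch of the generated file's structure, not the actual generated code.
+library UltraVerificationKey {
+    // Hard-coded verification key points for your specific circuit.
+}
+
+abstract contract BaseUltraVerifier {
+    // Most of the verifying logic: loads the proof from calldata and
+    // checks it against the verification key in inline assembly.
+    function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) {
+        // ...pairing checks elided...
+        return true; // placeholder for the sketch
+    }
+}
+
+contract UltraVerifier is BaseUltraVerifier {
+    // Wires UltraVerificationKey into the base verifier.
+}
+```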
+
+Remix will take care of the dependencies for us, so we can simply deploy the UltraVerifier contract by selecting it and hitting "deploy":
+
+![Deploying UltraVerifier](@site/static/img/how-tos/solidity_verifier_5.png)
+
+A contract will show up in the "Deployed Contracts" section, where we can retrieve the Verification Key Hash. This is particularly useful for double-checking that the deployed contract is the correct one.
+
+:::note
+
+Why "UltraVerifier"?
+
+To be precise, the Noir compiler (`nargo`) doesn't generate the verifier contract directly. It compiles the Noir code into an intermediate language (ACIR), which is then executed by the backend. So it is the backend that returns the verifier smart contract, not Noir.
+
+In this case, the Barretenberg backend uses the UltraPlonk proving system, hence the "UltraVerifier" name.
+
+:::
+
+## Step 4 - Verifying
+
+To verify a proof using the Solidity verifier contract, we call the `verify` function in this extended contract:
+
+```solidity
+function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool)
+```
+
+When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via Remix with the required parameters. For `_proof`, run `nargo prove` and use the string in `proof/.proof` (adding the hex `0x` prefix). We can also copy the public input from `Verifier.toml`, as it will be properly formatted as 32-byte strings:
+
+```
+0x...... , [0x0000.....02]
+```
+
+A programmatic example of how the `verify` function is called can be seen in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35):
+
+```solidity
+function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 nullifierHash) public returns (bool) {
+    // ...
+    bytes32[] memory publicInputs = new bytes32[](4);
+    publicInputs[0] = merkleRoot;
+    publicInputs[1] = bytes32(proposalId);
+    publicInputs[2] = bytes32(vote);
+    publicInputs[3] = nullifierHash;
+    require(verifier.verify(proof, publicInputs), "Invalid proof");
+```
+
+:::info[Return Values]
+
+A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in
+Noir.
+
+Under the hood, the return value is passed as an input to the circuit and is checked at the end of
+the circuit program.
+
+For example, if you have a Noir program like this:
+
+```rust
+fn main(
+    // Public inputs
+    pubkey_x: pub Field,
+    pubkey_y: pub Field,
+    // Private inputs
+    priv_key: Field,
+) -> pub Field
+```
+
+the `verify` function will expect the public inputs array (second function parameter) to be of length 3: the two public inputs and the return value. Like before, these values are populated in Verifier.toml after running `nargo prove`.
+
+Passing only two inputs will result in an error such as `PUBLIC_INPUT_COUNT_INVALID(3, 2)`.
+
+In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`.
+
+:::
+
+:::tip[Structs]
+
+You can pass structs to the verifier contract. They will be flattened so that the array of inputs is a 1-dimensional array.
+
+For example, consider the following program:
+
+```rust
+struct Type1 {
+  val1: Field,
+  val2: Field,
+}
+
+struct Nested {
+  t1: Type1,
+  is_true: bool,
+}
+
+fn main(x: pub Field, nested: pub Nested, y: pub Field) {
+    //...
+}
+```
+
+The inputs would be flattened to the following order: `[x, nested.t1.val1, nested.t1.val2, nested.is_true, y]`.
+
+:::
+
+The other function you can call is our entrypoint `verify` function, as defined above.
+
+:::tip
+
+It's worth noting that the `verify` function is actually a `view` function. A `view` function does not alter the blockchain state, so it doesn't need to be distributed (i.e. it will run only on the executing node), and therefore doesn't cost any gas.
+
+This can be particularly useful in some situations. If Alice generated a proof and wants Bob to verify its correctness, Bob doesn't need to run Nargo, NoirJS, or any Noir-specific infrastructure. He can simply make a call to the blockchain with the proof and verify it is correct without paying any gas.
+
+Verifying a Noir proof via such a view call therefore costs no gas at all. However, most of the time the result of `verify` is used to modify state (for example, to update a balance, a game state, etc.). In that case the whole network needs to execute the transaction, which does incur gas costs (calldata and execution, but not storage).
+
+:::
+
+## A Note on EVM chains
+
+ZK-SNARK verification depends on some precompiled cryptographic primitives such as Elliptic Curve Pairings (if you like complex math, you can read about EC Pairings [here](https://medium.com/@VitalikButerin/exploring-elliptic-curve-pairings-c73c1864e627)). Not all EVM chains support EC Pairings, notably some of the ZK-EVMs. This means that you won't be able to use the verifier contract on all of them.
+
+For example, chains like `zkSync ERA` and `Polygon zkEVM` do not currently support these precompiles, so proof verification via Solidity verifier contracts won't work. Here's a quick list of EVM chains that have been tested and are known to work:
+
+- Optimism
+- Arbitrum
+- Polygon PoS
+- Scroll
+- Celo
+
+If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains.
+
+## What's next
+
+Now that you know how to call a Noir Solidity Verifier on a smart contract using Remix, you should be comfortable using it from programmatic frameworks such as [hardhat](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat) and [foundry](https://github.com/noir-lang/noir-starter/tree/main/with-foundry).
+
+You can find other tools, examples, boilerplates and libraries in the [awesome-noir](https://github.com/noir-lang/awesome-noir) repository.
+
+You should also be ready to write and deploy your first NoirJS app and start generating proofs on websites, phones, and NodeJS environments! Head on to the [NoirJS tutorial](../tutorials/noirjs_app.md) to learn how to do that.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/merkle-proof.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/merkle-proof.mdx
new file mode 100644
index 00000000000..16c425bed76
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/merkle-proof.mdx
@@ -0,0 +1,49 @@
+---
+title: Prove Merkle Tree Membership
+description:
+  Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a
+  merkle tree with a specified root, at a given index.
+keywords:
+  [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree]
+sidebar_position: 4
+---
+
+Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is
+in a merkle tree.
+
+```rust
+use dep::std;
+
+fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) {
+    let leaf = std::hash::hash_to_field(message.as_slice());
+    let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath);
+    assert(merkle_root == root);
+}
+```
+
+The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen
+by the backend. The only requirement is that this hash function can heuristically be used as a
+random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash`
+instead.
+
+```rust
+let leaf = std::hash::hash_to_field(message.as_slice());
+```
+
+The leaf is then passed to a `compute_merkle_root` function along with the index and hashpath. The returned root can then be asserted to be the same as the provided root.
+
+```rust
+let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath);
+assert(merkle_root == root);
+```
+
+> **Note:** It is possible to re-implement the merkle tree without the standard library. However,
+> for most use cases, the standard library implementation is enough. In general, the standard
+> library will always opt to be as conservative as possible, while striking a balance with
+> efficiency.
+
+For example, the merkle membership proof only requires a hash function that has collision
+resistance, so a hash function like Pedersen is allowed, which in most cases is more efficient
+than the even more conservative sha256.
+
+[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/using-devcontainers.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/using-devcontainers.mdx
new file mode 100644
index 00000000000..727ec6ca667
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/how_to/using-devcontainers.mdx
@@ -0,0 +1,110 @@
+---
+title: Developer Containers and Codespaces
+description: "Learn how to set up a devcontainer in your GitHub repository for a seamless coding experience with Codespaces. Follow our easy 7-step guide to create your own Noir environment without installing Nargo locally."
+keywords: ["Devcontainer", "Codespaces", "GitHub", "Noir Environment", "Docker Image", "Development Environment", "Remote Coding", "GitHub Codespaces", "Noir Programming", "Nargo", "VSCode Extensions", "Noirup"]
+sidebar_position: 1
+---
+
+Adding a developer container configuration file to your Noir project is one of the easiest ways to unlock coding in the browser.
+
+## What's a devcontainer after all?
+
+A [Developer Container](https://containers.dev/) (devcontainer for short) is a Docker image that comes preloaded with the tools, extensions, and dependencies you need to quickly get started or continue a project, without having to install Nargo locally. Think of it as a development environment in a box.
+
+There are many advantages to this:
+
+- It's platform and architecture agnostic
+- You don't need to have an IDE installed, or Nargo, or use a terminal at all
+- It's safer for using on a public machine or public network
+
+One of the best ways of using devcontainers is... not using your machine at all, for maximum control, performance, and ease of use.
+Enter Codespaces.
+
+## Codespaces
+
+If a devcontainer is just a Docker image, then what stops you from provisioning a `p3dn.24xlarge` AWS EC2 instance with 92 vCPUs and 768 GiB RAM and using it to prove your 10-gate SNARK proof?
+
+Nothing! Except perhaps the $30-40 per hour it will cost you.
+
+The problem is that provisioning takes time, and I bet you don't want to see the AWS console every time you want to code something real quick.
+
+Fortunately, there's an easy and free way to get a decent remote machine ready and loaded in less than 2 minutes: Codespaces. [Codespaces is a GitHub feature](https://github.com/features/codespaces) that allows you to code on a remote machine by using devcontainers, and it's pretty cool:
+
+- You can start coding Noir in less than a minute
+- It uses the resources of a remote machine, so you can code on your grandma's phone if need be
+- It makes it easy to share work with your frens
+- It's fully reusable; you can stop and restart whenever you need to
+
+:::info
+
+Don't take out your wallet just yet. Free GitHub accounts get about [15-60 hours of coding](https://github.com/features/codespaces) for free per month, depending on the size of your provisioned machine.
+
+:::
+
+## Tell me it's _actually_ easy
+
+It is!
+
+GitHub comes with a default codespace and you can use it to code your own devcontainer. That's exactly what we will be doing in this guide.
+
+
+
+7 simple steps:
+
+#### 1. Create a new repository on GitHub.
+
+#### 2. Click "Start coding with Codespaces". This will use the default image.
+
+#### 3. Create a folder called `.devcontainer` in the root of your repository.
+
+#### 4. Create a Dockerfile in that folder, and paste the following code:
+
+```docker
+FROM --platform=linux/amd64 node:lts-bookworm-slim
+SHELL ["/bin/bash", "-c"]
+RUN apt update && apt install -y curl bash git tar gzip libc++-dev
+RUN curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash
+ENV PATH="/root/.nargo/bin:$PATH"
+RUN noirup
+ENTRYPOINT ["nargo"]
+```
+
+#### 5. Create a file called `devcontainer.json` in the same folder, and paste the following code:
+
+```json
+{
+  "name": "Noir on Codespaces",
+  "build": {
+    "context": ".",
+    "dockerfile": "Dockerfile"
+  },
+  "customizations": {
+    "vscode": {
+      "extensions": ["noir-lang.vscode-noir"]
+    }
+  }
+}
+```
+
+#### 6. Commit and push your changes
+
+This will pull the new image and build it, so it could take a minute or so.
+
+#### 7. Done!
+Just wait for the build to finish, and there's your easy Noir environment.
+
+
+Refer to [noir-starter](https://github.com/noir-lang/noir-starter/) as an example of how devcontainers can be used together with Codespaces.
+
+
+
+## How do I use it?
+
+Using the codespace is obviously much easier than setting it up.
+Just navigate to your repository and click "Code" -> "Open with Codespaces". It should take a few seconds to load, and you're ready to go.
+
+:::info
+
+If you really like the experience, you can add a badge to your readme, links to existing codespaces, and more.
+Check out the [official docs](https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/setting-up-your-repository/facilitating-quick-creation-and-resumption-of-codespaces) for more info.
+:::
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/index.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/index.mdx
new file mode 100644
index 00000000000..75086ddcdde
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/index.mdx
@@ -0,0 +1,67 @@
+---
+title: Noir Lang
+hide_title: true
+description:
+  Learn about the public alpha release of Noir, a domain specific language heavily influenced by Rust that compiles to
+  an intermediate language which can be compiled to an arithmetic circuit or a rank-1 constraint system.
+keywords:
+  [Noir,
+  Domain Specific Language,
+  Rust,
+  Intermediate Language,
+  Arithmetic Circuit,
+  Rank-1 Constraint System,
+  Ethereum Developers,
+  Protocol Developers,
+  Blockchain Developers,
+  Proving System,
+  Smart Contract Language]
+sidebar_position: 0
+---
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+Noir Logo
+
+Noir is a Domain-Specific Language for SNARK proving systems developed by [Aztec Labs](https://aztec.network/). It allows you to generate complex Zero-Knowledge Programs (ZKP) by using simple and flexible syntax, requiring no previous knowledge of the underlying mathematics or cryptography.
+
+ZK programs are programs that can generate short proofs of a certain statement without revealing some details about it. You can read more about ZKPs [here](https://dev.to/spalladino/a-beginners-intro-to-coding-zero-knowledge-proofs-c56).
+
+## What's new about Noir?
+
+Noir works differently from most ZK languages by taking a two-pronged path. First, it compiles the program to an adaptable intermediate language known as ACIR. From there, depending on a given project's needs, ACIR can be further compiled into an arithmetic circuit for integration with the proving backend.
+
+:::info
+
+Noir is backend agnostic, which means it makes no assumptions about which proving backend powers the ZK proof. Being the language that powers [Aztec Contracts](https://docs.aztec.network/developers/contracts/main), it defaults to Aztec's Barretenberg proving backend.
+
+However, the ACIR output can be transformed to be compatible with other PLONK-based backends, or into a [rank-1 constraint system](https://www.rareskills.io/post/rank-1-constraint-system) suitable for backends such as Arkworks' Marlin.
+
+:::
+
+## Who is Noir for?
+
+Noir can be used both in complex cloud-based backends and in users' smartphones, requiring no knowledge of the underlying math or cryptography. From authorization systems that keep a password on the user's device, to complex on-chain verification of recursive proofs, Noir is designed to abstract away complexity without any significant overhead. Here are some examples of situations where Noir can be used:
+
+
+
+Noir Logo
+
+Aztec Contracts leverage Noir to allow for the storage and execution of private information. Writing an Aztec Contract is as easy as writing Noir, and Aztec developers can easily interact with the network storage and execution through the [Aztec.nr](https://docs.aztec.network/developers/contracts/main) library.
+
+
+Solidity Verifier Example
+Noir can auto-generate Solidity verifier contracts that verify Noir proofs. This allows for non-interactive verification of proofs containing private information in an immutable system.
+This feature powers a multitude of use-case scenarios, from P2P chess tournaments to the [Aztec Layer-2 Blockchain](https://docs.aztec.network/).
+
+
+Aztec Labs developed NoirJS, an easy interface to generate and verify Noir proofs in a JavaScript environment. This allows for Noir to be used in webpages, mobile apps, games, and any other environment supporting JS execution in a standalone manner.
+
+
+
+
+## Libraries
+
+Noir is meant to be easy to extend by simply importing Noir libraries, just like in Rust.
+The [awesome-noir repo](https://github.com/noir-lang/awesome-noir#libraries) is a collection of libraries developed by the Noir community.
+Writing a new library is easy and makes code composable and easy to reuse. See the section on [dependencies](noir/modules_packages_crates/dependencies.md) for more information.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/migration_notes.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/migration_notes.md
new file mode 100644
index 00000000000..6bd740024e5
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/migration_notes.md
@@ -0,0 +1,105 @@
+---
+title: Migration notes
+description: Read about migration notes from previous versions, which could solve problems while updating
+keywords: [Noir, notes, migration, updating, upgrading]
+---
+
+Noir is in full-speed development. Things break fast, wild, and often. This page attempts to leave some notes on errors you might encounter when upgrading and how to resolve them until proper patches are built.
+
+### `backend encountered an error: libc++.so.1`
+
+Depending on your OS, you may encounter the following error when running `nargo prove` for the first time:
+
+```text
+The backend encountered an error: "/home/codespace/.nargo/backends/acvm-backend-barretenberg/backend_binary: error while loading shared libraries: libc++.so.1: cannot open shared object file: No such file or directory\n"
+```
+
+Install the `libc++-dev` library with:
+
+```bash
+sudo apt install libc++-dev
+```
+
+## ≥0.19
+
+### Enforcing `compiler_version`
+
+From this version on, the compiler will check for the `compiler_version` field in `Nargo.toml`, and will error if it doesn't match the current Nargo version in use.
+
+To update, please make sure this field in `Nargo.toml` matches the output of `nargo --version`.
+
+## ≥0.14
+
+The index of the [for loops](noir/concepts/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be:
+
+```rust
+for i in 0..10 {
+    let i = i as Field;
+}
+```
+
+## ≥v0.11.0 and Nargo backend
+
+From this version onwards, Nargo starts managing backends through the `nargo backend` command. Upgrading via the usual steps might lead to:
+
+### `backend encountered an error`
+
+This is likely because the existing locally installed version of the proving backend (e.g. Barretenberg) is incompatible with the version of Nargo in use.
+
+To fix the issue:
+
+1. Uninstall the existing backend
+
+```bash
+nargo backend uninstall acvm-backend-barretenberg
+```
+
+You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends.
+
+2. Reinstall a compatible version of the proving backend.
+
+If you are using the default barretenberg backend, simply run:
+
+```
+nargo prove
+```
+
+with your Noir program.
+
+This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use.
+
+### `backend encountered an error: illegal instruction`
+
+On certain Intel-based systems, an `illegal instruction` error may arise due to incompatibility of barretenberg with certain CPU instructions.
+
+To fix the issue:
+
+1. Uninstall the existing backend
+
+```bash
+nargo backend uninstall acvm-backend-barretenberg
+```
+
+You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends.
+
+2. Reinstall a compatible version of the proving backend.
+
+If you are using the default barretenberg backend, simply run:
+
+```
+nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/barretenberg-js-binary/raw/master/run-bb.tar.gz
+```
+
+This downloads and installs a specific bb.js-based version of the barretenberg binary from GitHub.
+
+The gzipped file runs [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh); it needs to be gzipped because Nargo currently expects the backend to be zipped up.
+
+Then run:
+
+```
+DESIRED_BINARY_VERSION=0.8.1 nargo info
+```
+
+This overrides the native bb binary with a bb.js node application instead, which should be compatible with most if not all hardware. This does come with the drawback of being generally slower than the native binary.
+
+Here, `0.8.1` indicates bb.js version 0.8.1. If you change it, a different version will be installed, falling back to the script's default version if none is supplied.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/_category_.json
new file mode 100644
index 00000000000..7da08f8a8c5
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/_category_.json
@@ -0,0 +1,6 @@
+{
+  "label": "Concepts",
+  "position": 0,
+  "collapsible": true,
+  "collapsed": true
+}
\ No newline at end of file
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/assert.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/assert.md
new file mode 100644
index 00000000000..bcff613a695
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/assert.md
@@ -0,0 +1,45 @@
+---
+title: Assert Function
+description:
+  Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or
+  comparison expression that follows to be true, and what happens if the expression is false at
+  runtime.
+keywords: [Noir programming language, assert statement, predicate expression, comparison expression]
+sidebar_position: 4
+---
+
+Noir includes a special `assert` function which will explicitly constrain the predicate/comparison
+expression that follows to be true. If this expression is false at runtime, the program will fail to
+be proven. Example:
+
+```rust
+fn main(x : Field, y : Field) {
+    assert(x == y);
+}
+```
+
+> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear whether `assert(x + y)` should check `x + y == 0` or simply return `true`.
+
+You can optionally provide a message to be logged when the assertion fails:
+
+```rust
+assert(x == y, "x and y are not equal");
+```
+
+Aside from string literals, the optional message can be a format string or any other type supported as input for Noir's [print](../standard_library/logging.md) functions.
+This feature lets you incorporate runtime variables into your failed assertion logs:
+
+```rust
+assert(x == y, f"Expected x == y, but got {x} == {y}");
+```
+
+Using a variable as an assertion message directly:
+
+```rust
+struct myStruct {
+  myField: Field
+}
+
+let s = myStruct { myField: y };
+assert(s.myField == x, s);
+```
+
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/comments.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/comments.md
new file mode 100644
index 00000000000..b51a85f5c94
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/comments.md
@@ -0,0 +1,33 @@
+---
+title: Comments
+description:
+  Learn how to write comments in Noir programming language. A comment is a line of code that is
+  ignored by the compiler, but it can be read by programmers. Single-line and multi-line comments
+  are supported in Noir.
+keywords: [Noir programming language, comments, single-line comments, multi-line comments]
+sidebar_position: 10
+---
+
+A comment is a line in your codebase which the compiler ignores; however, it can be read by
+programmers.
+
+Here is a single-line comment:
+
+```rust
+// This is a comment and is ignored
+```
+
+`//` is used to tell the compiler to ignore the rest of the line.
+
+Noir also supports multi-line block comments. Start a block comment with `/*` and end the block with `*/`.
+
+Noir does not natively support doc comments. You may be able to use [Rust doc comments](https://doc.rust-lang.org/reference/comments.html) in your code to leverage some Rust documentation build tools with Noir code.
+
+```rust
+/*
+  This is a block comment describing a complex function.
+*/
+fn main(x : Field, y : pub Field) {
+    assert(x != y);
+}
+```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/control_flow.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/control_flow.md
new file mode 100644
index 00000000000..045d3c3a5f5
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/control_flow.md
@@ -0,0 +1,77 @@
+---
+title: Control Flow
+description:
+  Learn how to use loops and if expressions in the Noir programming language. Discover the syntax
+  and examples for for loops and if-else statements.
+keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax]
+sidebar_position: 2
+---
+
+## If Expressions
+
+Noir supports `if-else` statements. The syntax is most similar to Rust's, where the statement's
+conditional is not required to be surrounded by parentheses.
+
+```rust
+let a = 0;
+let mut x: u32 = 0;
+
+if a == 0 {
+    if a != 0 {
+        x = 6;
+    } else {
+        x = 2;
+    }
+} else {
+    x = 5;
+    assert(x == 5);
+}
+assert(x == 2);
+```
+
+## Loops
+
+Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple
+times.
+
+The following block of code between the braces is run 10 times.
+
+```rust
+for i in 0..10 {
+    // do something
+}
+```
+
+The index of `for` loops is of type `u64`.
+
+### Break and Continue
+
+In unconstrained code, `break` and `continue` are also allowed in `for` loops. These are only allowed
+in unconstrained code since normal constrained code requires that Noir knows exactly how many iterations
+a loop may have. `break` and `continue` can be used like so:
+
+```rust
+for i in 0..10 {
+    println("Iteration start");
+
+    if i == 2 {
+        continue;
+    }
+
+    if i == 5 {
+        break;
+    }
+
+    println(i);
+}
+println("Loop end");
+```
+
+When used, `break` will end the current loop early and jump to the statement after the for loop. In the example
+above, the `break` will stop the loop and jump to the `println("Loop end")`.
+
+`continue` will stop the current iteration of the loop, and jump to the start of the next iteration. In the example
+above, `continue` will jump to `println("Iteration start")` when used. Note that the loop continues as normal after this.
+The iteration variable `i` is still increased by one as normal when `continue` is used.
+
+`break` and `continue` cannot currently be used to jump out of more than a single loop at a time.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_bus.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_bus.md
new file mode 100644
index 00000000000..e54fc861257
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_bus.md
@@ -0,0 +1,21 @@
+---
+title: Data Bus
+sidebar_position: 13
+---
+
+**Disclaimer:** this feature is experimental, do not use it!
+
+The data bus is an optimization that the backend can use to make recursion more efficient.
+In order to use it, you must define some inputs of the program entry points (usually the `main()`
+function) with the `call_data` modifier, and the return values with the `return_data` modifier.
+These modifiers are incompatible with the `pub` and `mut` modifiers.
+
+## Example
+
+```rust
+fn main(mut x: u32, y: call_data u32, z: call_data [u32; 4]) -> return_data u32 {
+    let a = z[x];
+    a + y
+}
+```
+
+As a result, both call_data and return_data will be treated as private inputs and encapsulated into a read-only array each, for the backend to process.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/_category_.json
new file mode 100644
index 00000000000..5d694210bbf
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/_category_.json
@@ -0,0 +1,5 @@
+{
+  "position": 0,
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/arrays.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/arrays.md
new file mode 100644
index 00000000000..efce3e95d32
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/arrays.md
@@ -0,0 +1,251 @@
+---
+title: Arrays
+description:
+  Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code.
+keywords:
+  [
+    noir,
+    array type,
+    methods,
+    examples,
+    indexing,
+  ]
+sidebar_position: 4
+---
+
+An array is one way of grouping together values into one compound type. Array types can be inferred
+or explicitly specified via the syntax `[<Type>; <Size>]`:
+
+```rust
+fn main(x : Field, y : Field) {
+    let my_arr = [x, y];
+    let your_arr: [Field; 2] = [x, y];
+}
+```
+
+Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements.
+
+Array elements can be accessed using indexing:
+
+```rust
+fn main() {
+    let a = [1, 2, 3, 4, 5];
+
+    let first = a[0];
+    let second = a[1];
+}
+```
+
+All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group
+a `Field` value and a `u8` value together, for example.
+
+You can write mutable arrays, like:
+
+```rust
+fn main() {
+    let mut arr = [1, 2, 3, 4, 5];
+    assert(arr[0] == 1);
+
+    arr[0] = 42;
+    assert(arr[0] == 42);
+}
+```
+
+You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0.
+
+```rust
+let array: [Field; 32] = [0; 32];
+```
+
+Like in Rust, arrays in Noir are of a fixed size. However, if you wish to convert an array to a [slice](./slices), you can just call `as_slice` on your array:
+
+```rust
+let array: [Field; 32] = [0; 32];
+let sl = array.as_slice();
+```
+
+You can define multidimensional arrays:
+
+```rust
+let array : [[Field; 2]; 2];
+let element = array[0][0];
+```
+
+However, multidimensional slices are not supported. For example, the following code will error at compile time:
+
+```rust
+let slice : [[Field]] = &[];
+```
+
+## Types
+
+You can create arrays of primitive types or structs. There is not yet support for nested arrays
+(arrays of arrays) or arrays of structs that contain arrays.
+
+## Methods
+
+For convenience, the STD provides some ready-to-use, common methods for arrays.
+Each of these functions is located within the generic impl `impl<T, N> [T; N] {`,
+so anywhere `self` appears, it refers to the variable `self: [T; N]`.
+
+### len
+
+Returns the length of an array
+
+```rust
+fn len(self) -> Field
+```
+
+example
+
+```rust
+fn main() {
+    let array = [42, 42];
+    assert(array.len() == 2);
+}
+```
+
+### sort
+
+Returns a new sorted array. The original array remains untouched. Notice that this function will
+only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting
+logic it uses internally is optimized specifically for these values. If you need a sort function to
+sort any type, you should use the function `sort_via` described below.
+
+```rust
+fn sort(self) -> [T; N]
+```
+
+example
+
+```rust
+fn main() {
+    let arr = [42, 32];
+    let sorted = arr.sort();
+    assert(sorted == [32, 42]);
+}
+```
+
+### sort_via
+
+Sorts the array with a custom comparison function
+
+```rust
+fn sort_via(self, ordering: fn(T, T) -> bool) -> [T; N]
+```
+
+example
+
+```rust
+fn main() {
+    let arr = [42, 32];
+    let sorted_ascending = arr.sort_via(|a, b| a < b);
+    assert(sorted_ascending == [32, 42]); // verifies
+
+    let sorted_descending = arr.sort_via(|a, b| a > b);
+    assert(sorted_descending == [32, 42]); // does not verify
+}
+```
+
+### map
+
+Applies a function to each element of the array, returning a new array containing the mapped elements.
+
+```rust
+fn map(self, f: fn(T) -> U) -> [U; N]
+```
+
+example
+
+```rust
+let a = [1, 2, 3];
+let b = a.map(|a| a * 2); // b is now [2, 4, 6]
+```
+
+### fold
+
+Applies a function to each element of the array, returning the final accumulated value. The first
+parameter is the initial value.
+
+```rust
+fn fold(self, mut accumulator: U, f: fn(U, T) -> U) -> U
+```
+
+This is a left fold, so the given function will be applied to the accumulator and first element of
+the array, then the second, and so on.
+For a given call, the expected result would be equivalent to:
+
+```rust
+let a1 = [1];
+let a2 = [1, 2];
+let a3 = [1, 2, 3];
+
+let f = |a, b| a - b;
+a1.fold(10, f) //=> f(10, 1)
+a2.fold(10, f) //=> f(f(10, 1), 2)
+a3.fold(10, f) //=> f(f(f(10, 1), 2), 3)
+```
+
+example:
+
+```rust
+fn main() {
+    let arr = [2, 2, 2, 2, 2];
+    let folded = arr.fold(0, |a, b| a + b);
+    assert(folded == 10);
+}
+```
+
+### reduce
+
+Same as fold, but uses the first element as the starting element.
+
+```rust
+fn reduce(self, f: fn(T, T) -> T) -> T
+```
+
+example:
+
+```rust
+fn main() {
+    let arr = [2, 2, 2, 2, 2];
+    let reduced = arr.reduce(|a, b| a + b);
+    assert(reduced == 10);
+}
+```
+
+### all
+
+Returns true if all the elements satisfy the given predicate
+
+```rust
+fn all(self, predicate: fn(T) -> bool) -> bool
+```
+
+example:
+
+```rust
+fn main() {
+    let arr = [2, 2, 2, 2, 2];
+    let all = arr.all(|a| a == 2);
+    assert(all);
+}
+```
+
+### any
+
+Returns true if any of the elements satisfy the given predicate
+
+```rust
+fn any(self, predicate: fn(T) -> bool) -> bool
+```
+
+example:
+
+```rust
+fn main() {
+    let arr = [2, 2, 2, 2, 5];
+    let any = arr.any(|a| a == 5);
+    assert(any);
+}
+```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/booleans.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/booleans.md
new file mode 100644
index 00000000000..69826fcd724
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/booleans.md
@@ -0,0 +1,31 @@
+---
+title: Booleans
+description:
+  Delve into the Boolean data type in Noir. Understand its methods, practical examples, and best practices for using Booleans in your Noir programs.
+keywords:
+  [
+    noir,
+    boolean type,
+    methods,
+    examples,
+    logical operations,
+  ]
+sidebar_position: 2
+---
+
+The `bool` type in Noir has two possible values: `true` and `false`:
+
+```rust
+fn main() {
+    let t = true;
+    let f: bool = false;
+}
+```
+
+> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for
+> `false` in _Verifier.toml_.
+
+The boolean type is most commonly used in conditionals like `if` expressions and `assert`
+statements. More about conditionals is covered in the [Control Flow](../control_flow) and
+[Assert Function](../assert) sections.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/fields.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/fields.md
new file mode 100644
index 00000000000..a10a4810788
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/fields.md
@@ -0,0 +1,192 @@
+---
+title: Fields
+description:
+  Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs.
+keywords:
+  [
+    noir,
+    field type,
+    methods,
+    examples,
+    best practices,
+  ]
+sidebar_position: 0
+---
+
+The field type corresponds to the native field type of the proving backend.
+
+The size of a Noir field depends on the elliptic curve's finite field for the proving backend
+adopted. For example, a field would be a 254-bit integer when paired with the default backend that
+spans the Grumpkin curve.
+
+Fields support integer arithmetic and are often used as the default numeric type in Noir:
+
+```rust
+fn main(x : Field, y : Field) {
+    let z = x + y;
+}
+```
+
+`x`, `y` and `z` are all private fields in this example. Using the `let` keyword, we defined a new
+private value `z` constrained to be equal to `x + y`.
+
+If proving efficiency is a priority, fields should be used as a default for solving problems.
+Smaller integer types (e.g. `u64`) incur extra range constraints.
+
+## Methods
+
+After declaring a Field, you can use these common methods on it:
+
+### to_le_bits
+
+Transforms the field into an array of bits, Little Endian.
+
+```rust
+fn to_le_bits(_x : Field, _bit_size: u32) -> [u1]
+```
+
+example:
+
+```rust
+fn main() {
+    let field = 2;
+    let bits = field.to_le_bits(32);
+}
+```
+
+### to_be_bits
+
+Transforms the field into an array of bits, Big Endian.
+
+```rust
+fn to_be_bits(_x : Field, _bit_size: u32) -> [u1]
+```
+
+example:
+
+```rust
+fn main() {
+    let field = 2;
+    let bits = field.to_be_bits(32);
+}
+```
+
+### to_le_bytes
+
+Transforms into an array of bytes, Little Endian
+
+```rust
+fn to_le_bytes(_x : Field, byte_size: u32) -> [u8]
+```
+
+example:
+
+```rust
+fn main() {
+    let field = 2;
+    let bytes = field.to_le_bytes(4);
+}
+```
+
+### to_be_bytes
+
+Transforms into an array of bytes, Big Endian
+
+```rust
+fn to_be_bytes(_x : Field, byte_size: u32) -> [u8]
+```
+
+example:
+
+```rust
+fn main() {
+    let field = 2;
+    let bytes = field.to_be_bytes(4);
+}
+```
+
+### to_le_radix
+
+Decomposes into a vector over the specified base, Little Endian
+
+```rust
+fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8]
+```
+
+example:
+
+```rust
+fn main() {
+    let field = 2;
+    let radix = field.to_le_radix(256, 4);
+}
+```
+
+### to_be_radix
+
+Decomposes into a vector over the specified base, Big Endian
+
+```rust
+fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8]
+```
+
+example:
+
+```rust
+fn main() {
+    let field = 2;
+    let radix = field.to_be_radix(256, 4);
+}
+```
+
+### pow_32
+
+Returns the value to the power of the specified exponent
+
+```rust
+fn pow_32(self, exponent: Field) -> Field
+```
+
+example:
+
+```rust
+fn main() {
+    let field = 2;
+    let pow = field.pow_32(4);
+    assert(pow == 16);
+}
+```
+
+### assert_max_bit_size
+
+Adds a constraint to specify that the field can be represented with `bit_size` number of bits
+
+```rust
+fn assert_max_bit_size(self, bit_size: u32)
+```
+
+example:
+
+```rust
+fn main() {
+    let field = 2;
+    field.assert_max_bit_size(32);
+}
+```
+
+### sgn0
+
+Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1.
+
+```rust
+fn sgn0(self) -> u1
+```
+
+### lt
+
+Returns true if the field is less than the other field
+
+```rust
+pub fn lt(self, another: Field) -> bool
+```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/function_types.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/function_types.md
new file mode 100644
index 00000000000..f6121af17e2
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/function_types.md
@@ -0,0 +1,26 @@
+---
+title: Function types
+sidebar_position: 10
+---
+
+Noir supports higher-order functions. The syntax for a function type is as follows:
+
+```rust
+fn(arg1_type, arg2_type, ...) -> return_type
+```
+
+Example:
+
+```rust
+fn assert_returns_100(f: fn() -> Field) { // f takes no args and returns a Field
+    assert(f() == 100);
+}
+
+fn main() {
+    assert_returns_100(|| 100); // ok
+    assert_returns_100(|| 150); // fails
+}
+```
+
+A function type also has an optional capture environment - this is necessary to support closures.
+See [Lambdas](../lambdas.md) for more details.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/index.md
new file mode 100644
index 00000000000..357813c147a
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/index.md
@@ -0,0 +1,110 @@
+---
+title: Data Types
+description:
+  Get a clear understanding of the two categories of Noir data types - primitive types and compound
+  types. Learn about their characteristics, differences, and how to use them in your Noir
+  programming.
+keywords:
+  [
+    noir,
+    data types,
+    primitive types,
+    compound types,
+    private types,
+    public types,
+  ]
+---
+
+Every value in Noir has a type, which determines which operations are valid for it.
+
+All values in Noir are fundamentally composed of `Field` elements. For a more approachable
+development experience, abstractions are added on top to introduce different data types in Noir.
+
+Noir has two categories of data types: primitive types (e.g. `Field`, integers, `bool`) and compound
+types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or
+public.
+
+## Private & Public Types
+
+A **private value** is known only to the Prover, while a **public value** is known by both the
+Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All
+primitive types (including individual fields of compound types) in Noir are private by default, and
+can be marked public when certain values are intended to be revealed to the Verifier.
+
+> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once
+> the proofs are verified on-chain the values can be considered known to everyone that has access to
+> that blockchain.
+
+Public data types are treated no differently to private types, apart from the fact that their values
+will be revealed in proofs generated. Simply changing the value of a public type will not change the
+circuit (and the same goes for changing values of private types).
+
+_Private values_ are also sometimes referred to as _witnesses_.
+
+> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different
+> meaning than when applied to a function (e.g. `pub fn foo() {}`).
+>
+> The former is a visibility modifier for the Prover to interpret if a value should be made known to
+> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a
+> function should be made accessible to external Noir programs like in other languages.
+
+### pub Modifier
+
+All data types in Noir are private by default. Types are explicitly declared as public using the
+`pub` modifier:
+
+```rust
+fn main(x : Field, y : pub Field) -> pub Field {
+    x + y
+}
+```
+
+In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. Note
+that visibility is handled **per variable**, so it is perfectly valid to have one input that is
+private and another that is public.

> **Note:** Public types can only be declared through parameters on `main`.

## Type Aliases

A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`:

```rust
type Id = u8;

fn main() {
    let id: Id = 1;
    let zero: u8 = 0;
    assert(zero + 1 == id);
}
```

Type aliases can also be used with [generics](../generics.md):

```rust
type Id<Size> = Size;

fn main() {
    let id: Id<u32> = 1;
    let zero: u32 = 0;
    assert(zero + 1 == id);
}
```

Type aliases can even refer to other aliases. An error will be issued if they form a cycle:

```rust
// Ok!
type A = B;
type B = Field;

type Bad1 = Bad2;

// error: Dependency cycle found
type Bad2 = Bad1;
// ^^^^^^^^^^^ 'Bad2' recursively depends on itself: Bad2 -> Bad1 -> Bad2
```

### BigInt

You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/integers.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/integers.md
new file mode 100644
index 00000000000..1c6b375db49
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/integers.md
@@ -0,0 +1,155 @@
---
title: Integers
description: Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code.
keywords: [noir, integer types, methods, examples, arithmetic]
sidebar_position: 1
---

An integer type is a range-constrained field type. The Noir frontend supports both unsigned and signed integer types. The allowed sizes are 1, 8, 32 and 64 bits.

:::info

When an integer is defined in Noir without a specific type, it will default to `Field`.

The one exception is for loop indices which default to `u64` since comparisons on `Field`s are not possible.

:::

## Unsigned Integers

An unsigned integer type is specified first with the letter `u` (indicating its unsigned nature) followed by its bit size (e.g. `8`):

```rust
fn main() {
    let x: u8 = 1;
    let y: u8 = 1;
    let z = x + y;
    assert (z == 2);
}
```

The bit size determines the maximum value the integer type can store. For example, a `u8` variable can store a value in the range of 0 to 255 (i.e. $2^{8}-1$).

## Signed Integers

A signed integer type is specified first with the letter `i` (which stands for integer) followed by its bit size (e.g. `8`):

```rust
fn main() {
    let x: i8 = -1;
    let y: i8 = -1;
    let z = x + y;
    assert (z == -2);
}
```

The bit size determines the range of values the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $-2^{7}$ to $2^{7}-1$).

## 128-bit Unsigned Integers

The built-in structure `U128` allows you to use 128-bit unsigned integers almost like a native integer type. However, there are some differences to keep in mind:
- You cannot cast between a native integer and `U128`
- There is a higher performance cost when using `U128`, compared to a native type.

Conversion between unsigned integer types and U128 is done through the use of the `from_integer` and `to_integer` functions. `from_integer` also accepts the `Field` type as input.

```rust
fn main() {
    let x = U128::from_integer(23);
    let y = U128::from_hex("0x7");
    let z = x + y;
    assert(z.to_integer() == 30);
}
```

`U128` is implemented with two 64-bit limbs, representing the low and high bits, which explains the performance cost. You should expect `U128` to be twice as costly for addition and four times as costly for multiplication.
You can construct a U128 from its limbs:
```rust
fn main(x: u64, y: u64) {
    let z = U128::from_u64s_be(x, y);
    assert(z.hi == x as Field);
    assert(z.lo == y as Field);
}
```

Note that the limbs are stored as Field elements in order to avoid unnecessary conversions.
Apart from this, most operations will work as usual:

```rust
fn main(x: U128, y: U128) {
    // multiplication
    let c = x * y;
    // addition and subtraction
    let c = c - x + y;
    // division
    let c = x / y;
    // bit operation;
    let c = x & y | y;
    // bit shift
    let c = x << y;
    // comparisons;
    let c = x < y;
    let c = x == y;
}
```

## Overflows

Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers. For example, attempting to prove:

```rust
fn main(x: u8, y: u8) {
    let z = x + y;
}
```

With:

```toml
x = "255"
y = "1"
```

Would result in:

```
$ nargo prove
error: Assertion failed: 'attempt to add with overflow'
┌─ ~/src/main.nr:9:13
│
│     let z = x + y;
│             -----
│
= Call stack:
  ...
```

A similar error would happen with signed integers:

```rust
fn main() {
    let x: i8 = -118;
    let y: i8 = -11;
    let z = x + y;
}
```

### Wrapping methods

Although integer overflow is expected to error, some use-cases rely on wrapping. For these use-cases, the standard library provides `wrapping` variants of certain common operations:

```rust
fn wrapping_add<T>(x: T, y: T) -> T;
fn wrapping_sub<T>(x: T, y: T) -> T;
fn wrapping_mul<T>(x: T, y: T) -> T;
```

Example of how it is used:

```rust
use dep::std;

fn main(x: u8, y: u8) -> pub u8 {
    std::wrapping_add(x, y)
}
```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/references.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/references.md
new file mode 100644
index 00000000000..a5293d11cfb
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/references.md
@@ -0,0 +1,23 @@
---
title: References
sidebar_position: 9
---

Noir supports first-class references. References are a bit like pointers: they point to a specific address that can be followed to access the data stored at that address. You can use Rust-like syntax to use pointers in Noir: the `&` operator references the variable, the `*` operator dereferences it.

Example:

```rust
fn main() {
    let mut x = 2;

    // you can reference x as &mut and pass it to multiplyBy2
    multiplyBy2(&mut x);
}

// you can access &mut here
fn multiplyBy2(x: &mut Field) {
    // and dereference it with *
    *x = *x * 2;
}
```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/slices.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/slices.mdx
new file mode 100644
index 00000000000..4eccc677b80
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/slices.mdx
@@ -0,0 +1,195 @@
---
title: Slices
description: Explore the Slice data type in Noir.
Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs.
keywords: [noir, slice type, methods, examples, subarrays]
sidebar_position: 5
---

import Experimental from '@site/src/components/Notes/_experimental.mdx';

<Experimental />

A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size.

```rust
use dep::std::slice;

fn main() -> pub Field {
    let mut slice: [Field] = &[0; 2];

    let mut new_slice = slice.push_back(6);
    new_slice.len()
}
```

To write a slice literal, use a preceding ampersand as in: `&[0; 2]` or
`&[1, 2, 3]`.

It is important to note that slices are not references to arrays. In Noir,
`&[..]` is more similar to an immutable, growable vector.

View the corresponding test file [here][test-file].

[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr

## Methods

For convenience, the standard library provides some ready-to-use, common methods for slices:

### push_back

Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice.

```rust
fn push_back(_self: [T], _elem: T) -> [T]
```

example:

```rust
fn main() -> pub Field {
    let mut slice: [Field] = &[0; 2];

    let mut new_slice = slice.push_back(6);
    new_slice.len()
}
```

View the corresponding test file [here][test-file].

### push_front

Returns a new array with the specified element inserted at index 0. The existing elements' indexes are incremented by 1.

```rust
fn push_front(_self: Self, _elem: T) -> Self
```

Example:

```rust
let mut new_slice: [Field] = &[];
new_slice = new_slice.push_front(20);
assert(new_slice[0] == 20); // returns true
```

View the corresponding test file [here][test-file].

### pop_front

Returns a tuple of two items, the first element of the array and the rest of the array.

```rust
fn pop_front(_self: Self) -> (T, Self)
```

Example:

```rust
let (first_elem, rest_of_slice) = slice.pop_front();
```

View the corresponding test file [here][test-file].

### pop_back

Returns a tuple of two items, the beginning of the array with the last element omitted and the last element.

```rust
fn pop_back(_self: Self) -> (Self, T)
```

Example:

```rust
let (popped_slice, last_elem) = slice.pop_back();
```

View the corresponding test file [here][test-file].

### append

Loops over a slice and adds it to the end of another.

```rust
fn append(mut self, other: Self) -> Self
```

Example:

```rust
let append = &[1, 2].append(&[3, 4, 5]);
```

### insert

Inserts an element at a specified index and shifts all following elements by 1.

```rust
fn insert(_self: Self, _index: Field, _elem: T) -> Self
```

Example:

```rust
new_slice = rest_of_slice.insert(2, 100);
assert(new_slice[2] == 100);
```

View the corresponding test file [here][test-file].

### remove

Removes an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element.

```rust
fn remove(_self: Self, _index: Field) -> (Self, T)
```

Example:

```rust
let (remove_slice, removed_elem) = slice.remove(3);
```

### len

Returns the length of a slice.

```rust
fn len(self) -> Field
```

Example:

```rust
fn main() {
    let slice = &[42, 42];
    assert(slice.len() == 2);
}
```

### as_array

Converts this slice into an array.

Make sure to specify the size of the resulting array.
Panics if the resulting array length is different than the slice's length.

```rust
fn as_array<N>(self) -> [T; N]
```

Example:

```rust
fn main() {
    let slice = &[5, 6];

    // Always specify the length of the resulting array!
    let array: [Field; 2] = slice.as_array();

    assert(array[0] == slice[0]);
    assert(array[1] == slice[1]);
}
```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/strings.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/strings.md
new file mode 100644
index 00000000000..311dfd64416
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/strings.md
@@ -0,0 +1,80 @@
---
title: Strings
description:
  Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir.
keywords:
  [
    noir,
    string type,
    methods,
    examples,
    concatenation,
  ]
sidebar_position: 3
---


The string type is a fixed length value defined with `str<N>`.

You can use strings in `assert()` functions or print them with
`println()`. See more about [Logging](../../standard_library/logging).

```rust
use dep::std;

fn main(message : pub str<11>, hex_as_string : str<4>) {
    println(message);
    assert(message == "hello world");
    assert(hex_as_string == "0x41");
}
```

You can convert a `str<N>` to a byte array by calling `as_bytes()`
or a vector by calling `as_bytes_vec()`.

```rust
fn main() {
    let message = "hello world";
    let message_bytes = message.as_bytes();
    let mut message_vec = message.as_bytes_vec();
    assert(message_bytes.len() == 11);
    assert(message_bytes[0] == 104);
    assert(message_bytes[0] == message_vec.get(0));
}
```

## Escape characters

You can use escape characters for your strings:

| Escape Sequence | Description     |
|-----------------|-----------------|
| `\r`            | Carriage Return |
| `\n`            | Newline         |
| `\t`            | Tab             |
| `\0`            | Null Character  |
| `\"`            | Double Quote    |
| `\\`            | Backslash       |

Example:

```rust
let s = "Hello \"world"; // prints "Hello "world"
let s = "hey \tyou"; // prints "hey 	you"
```

## Raw strings

A raw string begins with the letter `r` and is optionally delimited by a number of hashes `#`.

Escape characters are *not* processed within raw strings. All contents are interpreted literally.

Example:

```rust
let s = r"Hello world";
let s = r#"Simon says "hello world""#;

// Any number of hashes may be used (>= 1) as long as the string also terminates with the same number of hashes
let s = r#####"One "#, Two "##, Three "###, Four "####, Five will end the string."#####;
```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/structs.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/structs.md
new file mode 100644
index 00000000000..dbf68c99813
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/structs.md
@@ -0,0 +1,70 @@
---
title: Structs
description:
  Explore the Struct data type in Noir. Learn about its methods, see real-world examples, and grasp how to effectively define and use Structs in your Noir programs.
keywords:
  [
    noir,
    struct type,
    methods,
    examples,
    data structures,
  ]
sidebar_position: 8
---

A struct also allows for grouping multiple values of different types. Unlike tuples, we can also
name each field.

> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the
> field type of Noir.

Defining a struct requires giving it a name and listing each field within as `<Key>: <Type>` pairs:

```rust
struct Animal {
    hands: Field,
    legs: Field,
    eyes: u8,
}
```

An instance of a struct can then be created with actual values in `<Key>: <Value>` pairs in any
order. Struct fields are accessible using their given names:

```rust
fn main() {
    let legs = 4;

    let dog = Animal {
        eyes: 2,
        hands: 0,
        legs,
    };

    let zero = dog.hands;
}
```

Structs can also be destructured in a pattern, binding each field to a new variable:

```rust
fn main() {
    let Animal { hands, legs: feet, eyes } = get_octopus();

    let ten = hands + feet + eyes as u8;
}

fn get_octopus() -> Animal {
    let octopus = Animal {
        hands: 0,
        legs: 8,
        eyes: 2,
    };

    octopus
}
```

The new variables can be bound with names different from the original struct field names, as
showcased in the `legs --> feet` binding in the example above.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/tuples.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/tuples.md
new file mode 100644
index 00000000000..2ec5c9c4113
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/data_types/tuples.md
@@ -0,0 +1,48 @@
---
title: Tuples
description:
  Dive into the Tuple data type in Noir. Understand its methods, practical examples, and best practices for efficiently using Tuples in your Noir code.
keywords:
  [
    noir,
    tuple type,
    methods,
    examples,
    multi-value containers,
  ]
sidebar_position: 7
---

A tuple collects multiple values like an array, but with the added ability to collect values of
different types:

```rust
fn main() {
    let tup: (u8, u64, Field) = (255, 500, 1000);
}
```

One way to access tuple elements is via destructuring using pattern matching:

```rust
fn main() {
    let tup = (1, 2);

    let (one, two) = tup;

    let three = one + two;
}
```

Another way to access tuple elements is via direct member access, using a period (`.`) followed by
the index of the element we want to access.
Index `0` corresponds to the first tuple element, `1` to
the second and so on:

```rust
fn main() {
    let tup = (5, 6, 7, 8);

    let five = tup.0;
    let eight = tup.3;
}
```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/distinct.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/distinct.md
new file mode 100644
index 00000000000..6c993b8b5e0
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/distinct.md
@@ -0,0 +1,64 @@
---
title: Distinct Witnesses
sidebar_position: 11
---

The `distinct` keyword prevents repetitions of witness indices in the program's ABI. This ensures
that the witnesses being returned as public inputs are all unique.

The `distinct` keyword is only used for return values on program entry points (usually the `main()`
function).

When using `distinct` and `pub` simultaneously, `distinct` comes first. See the example below.

You can read more about the problem this solves
[here](https://github.com/noir-lang/noir/issues/1183).

## Example

Without the `distinct` keyword, the following program

```rust
fn main(x : pub Field, y : pub Field) -> pub [Field; 4] {
    let a = 1;
    let b = 1;
    [x + 1, y, a, b]
}
```

compiles to

```json
{
  //...
  "abi": {
    //...
    "param_witnesses": { "x": [1], "y": [2] },
    "return_witnesses": [3, 2, 4, 4]
  }
}
```

Whereas (with the `distinct` keyword)

```rust
fn main(x : pub Field, y : pub Field) -> distinct pub [Field; 4] {
    let a = 1;
    let b = 1;
    [x + 1, y, a, b]
}
```

compiles to

```json
{
  //...
  "abi": {
    //...
    "param_witnesses": { "x": [1], "y": [2] },
    //...
    "return_witnesses": [3, 4, 5, 6]
  }
}
```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/functions.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/functions.md
new file mode 100644
index 00000000000..f656cdfd97a
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/functions.md
@@ -0,0 +1,226 @@
---
title: Functions
description:
  Learn how to declare functions and methods in Noir, a programming language with Rust semantics.
  This guide covers parameter declaration, return types, call expressions, and more.
keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions]
sidebar_position: 1
---

Functions in Noir follow the same semantics as Rust, though Noir does not support early returns.

To declare a function the `fn` keyword is used.

```rust
fn foo() {}
```

By default, functions are visible only within the package in which they are defined. To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`:

```rust
pub fn foo() {}
```

You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`:

```rust
pub(crate) fn foo() {}  //foo can only be called within its crate
```

All parameters in a function must have a type and all types are known at compile time. The parameter
is prepended with a colon and the parameter type. Multiple parameters are separated using a comma.

```rust
fn foo(x : Field, y : Field){}
```

The return type of a function can be stated by using the `->` arrow notation. The function below
states that the foo function must return a `Field`.
If the function returns no value, then the arrow
is omitted.

```rust
fn foo(x : Field, y : Field) -> Field {
    x + y
}
```

Note that a `return` keyword is unneeded in this case - the last expression in a function's body is
returned.

## Main function

If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time:

```rust
fn main(x : Field) // this is fine: passing a Field
fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time
fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2
fn main(x : str<5>) // this is fine, as long as you pass a string of size 5

fn main(x : Vec<Field>) // can't compile, has variable size
fn main(x : [Field]) // can't compile, has variable size
fn main(....// i think you got it by now
```

Keep in mind [tests](../../tooling/testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove:

```rust
fn main(x : [Field]) {
    assert(x[0] == 1);
}

#[test]
fn test_one() {
    main(&[1, 2]);
}
```

```bash
$ nargo test
[testing] Running 1 test functions
[testing] Testing test_one... ok
[testing] All tests passed

$ nargo check
The application panicked (crashed).
Message:  Cannot have variable sized arrays as a parameter to main
```

## Call Expressions

Calling a function in Noir is executed by using the function name and passing in the necessary
arguments.

Below we show how to call the `foo` function from the `main` function using a call expression:

```rust
fn main(x : Field, y : Field) {
    let z = foo(x);
}

fn foo(x : Field) -> Field {
    x + x
}
```

## Methods

You can define methods in Noir on any struct type in scope.

```rust
struct MyStruct {
    foo: Field,
    bar: Field,
}

impl MyStruct {
    fn new(foo: Field) -> MyStruct {
        MyStruct {
            foo,
            bar: 2,
        }
    }

    fn sum(self) -> Field {
        self.foo + self.bar
    }
}

fn main() {
    let s = MyStruct::new(40);
    assert(s.sum() == 42);
}
```

Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as
follows:

```rust
assert(MyStruct::sum(s) == 42);
```

It is also possible to specialize which method is chosen depending on the [generic](./generics.md) type that is used. In this example, the `foo` function returns different values depending on its type:

```rust
struct Foo<T> {}

impl Foo<Field> {
    fn foo(self) -> Field { 1 }
}

impl Foo<u32> {
    fn foo(self) -> Field { 2 }
}

fn main() {
    let f1: Foo<Field> = Foo{};
    let f2: Foo<u32> = Foo{};
    assert(f1.foo() + f2.foo() == 3);
}
```

Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo<Field>` and `Foo<u32>` like we do above, we cannot also define `foo` in an `impl<T> Foo<T>` since it would be ambiguous which version of `foo` to choose.

```rust
// Including this impl in the same project as the above snippet would
// cause an overlapping impls error
impl<T> Foo<T> {
    fn foo(self) -> Field { 3 }
}
```

## Lambdas

Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`.

```rust
let add_50 = |val| val + 50;
assert(add_50(100) == 150);
```

See [Lambdas](./lambdas.md) for more details.

## Attributes

Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`.

Supported attributes include:

- **builtin**: the function is implemented by the compiler, for efficiency purposes.
- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function`
- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details
- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./unconstrained.md) and [NoirJS](../../reference/NoirJS/noir_js/index.md) for more details.
- **test**: mark the function as a unit test. See [Tests](../../tooling/testing.md) for more details

### Field Attribute

The field attribute defines which field the function is compatible with. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field.
The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form.
As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve.

Example: we define the function `foo()` three times below. Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve.

```rust
#[field(bn254)]
fn foo() -> u32 {
    1
}

#[field(23)]
fn foo() -> u32 {
    2
}

// This commented code would not compile as foo would be defined twice because it is the same field as bn254
// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)]
// fn foo() -> u32 {
//     2
// }

#[field(bls12_381)]
fn foo() -> u32 {
    3
}
```

If the field name is not known to Noir, it will discard the function. Field names are case insensitive.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/generics.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/generics.md
new file mode 100644
index 00000000000..ddd42bf1f9b
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/generics.md
@@ -0,0 +1,106 @@
---
title: Generics
description: Learn how to use Generics in Noir
keywords: [Noir, Rust, generics, functions, structs]
sidebar_position: 7
---

Generics allow you to use the same functions with multiple different concrete data types. You can
read more about the concept of generics in the Rust documentation
[here](https://doc.rust-lang.org/book/ch10-01-syntax.html).

Here is a trivial example showing the identity function that supports any type. In Rust, it is
common to refer to the most general type as `T`. We follow the same convention in Noir.

```rust
fn id<T>(x: T) -> T {
    x
}
```

## In Structs

Generics are useful for specifying types in structs. For example, we can specify that a field in a
struct will be of a certain generic type. In this case `value` is of type `T`.

```rust
struct RepeatedValue<T> {
    value: T,
    count: Field,
}

impl<T> RepeatedValue<T> {
    fn print(self) {
        for _i in 0 ..
self.count {
            println(self.value);
        }
    }
}

fn main() {
    let repeated = RepeatedValue { value: "Hello!", count: 2 };
    repeated.print();
}
```

The `print` function will print `Hello!` an arbitrary number of times, twice in this case.

If we want to be generic over array lengths (which are type-level integers), we can use numeric
generics. Using these looks just like using regular generics, but these generics can resolve to
integers at compile-time, rather than resolving to types. Here's an example of a struct that is
generic over the size of the array it contains internally:

```rust
struct BigInt<N> {
    limbs: [u32; N],
}

impl<N> BigInt<N> {
    // `N` is in scope of all methods in the impl
    fn first(first: BigInt<N>, second: BigInt<N>) -> Self {
        assert(first.limbs != second.limbs);
        first
    }

    fn second(first: BigInt<N>, second: Self) -> Self {
        assert(first.limbs != second.limbs);
        second
    }
}
```

## Calling functions on generic parameters

Since a generic type `T` can represent any type, how can we call functions on the underlying type?
In other words, how can we go from "any type `T`" to "any type `T` that has certain methods available?"

This is what [traits](../concepts/traits) are for in Noir. Here's an example of a function generic over
any type `T` that implements the `Eq` trait for equality:

```rust
fn first_element_is_equal<T, N>(array1: [T; N], array2: [T; N]) -> bool
    where T: Eq
{
    if (array1.len() == 0) | (array2.len() == 0) {
        true
    } else {
        array1[0] == array2[0]
    }
}

fn main() {
    assert(first_element_is_equal([1, 2, 3], [1, 5, 6]));

    // We can use first_element_is_equal for arrays of any type
    // as long as we have an Eq impl for the types we pass in
    let array = [MyStruct::new(), MyStruct::new()];
    assert(array_eq(array, array, MyStruct::eq));
}

impl Eq for MyStruct {
    fn eq(self, other: MyStruct) -> bool {
        self.foo == other.foo
    }
}
```

You can find more details on traits and trait implementations on the [traits page](../concepts/traits).
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/globals.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/globals.md
new file mode 100644
index 00000000000..063a3d89248
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/globals.md
@@ -0,0 +1,72 @@
---
title: Global Variables
description:
  Learn about global variables in Noir. Discover how
  to declare, modify, and use them in your programs.
keywords: [noir programming language, globals, global variables, constants]
sidebar_position: 8
---

## Globals


Noir supports global variables. The global's type can be inferred by the compiler entirely:

```rust
global N = 5; // Same as `global N: Field = 5`

global TUPLE = (3, 2);

fn main() {
    assert(N == 5);
    assert(N == TUPLE.0 + TUPLE.1);
}
```

:::info

Globals can be defined as any expression, so long as they don't depend on themselves - otherwise there would be a dependency cycle!
For example:

```rust
global T = foo(T); // dependency error
```

:::


If they are initialized to a literal integer, globals can be used to specify an array's length:

```rust
global N: Field = 2;

fn main(y : [Field; N]) {
    assert(y[0] == y[1])
}
```

A global from another module can be imported or referenced externally like any other name:

```rust
global N = 20;

fn main() {
    assert(my_submodule::N != N);
}

mod my_submodule {
    global N: Field = 10;
}
```

When a global is used, Noir replaces the name with its definition on each occurrence.
This means globals defined using function calls will repeat the call each time they're used:

```rust
global RESULT = foo();

fn foo() -> [Field; 100] { ... }
```

This is usually fine since Noir will generally optimize any function call that does not
refer to a program input into a constant. It should be kept in mind however, if the called
function performs side-effects like `println`, as these will still occur on each use.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/lambdas.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/lambdas.md
new file mode 100644
index 00000000000..be3c7e0b5ca
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/lambdas.md
@@ -0,0 +1,81 @@
---
title: Lambdas
description: Learn how to use anonymous functions in Noir programming language.
keywords: [Noir programming language, lambda, closure, function, anonymous function]
sidebar_position: 9
---

## Introduction

Lambdas are anonymous functions. The syntax is `|arg1, arg2, ..., argN| return_expression`.

```rust
let add_50 = |val| val + 50;
assert(add_50(100) == 150);
```

A block can be used as the body of a lambda, allowing you to declare local variables inside it:

```rust
let cool = || {
    let x = 100;
    let y = 100;
    x + y
};

assert(cool() == 200);
```

## Closures

Inside the body of a lambda, you can use variables defined in the enclosing function. Such lambdas are called **closures**. In this example `x` is defined inside `main` and is accessed from within the lambda:

```rust
fn main() {
    let x = 100;
    let closure = || x + 150;
    assert(closure() == 250);
}
```

## Passing closures to higher-order functions

It may catch you by surprise that the following code fails to compile:

```rust
fn foo(f: fn () -> Field) -> Field {
    f()
}

fn main() {
    let (x, y) = (50, 50);
    assert(foo(|| x + y) == 100); // error :(
}
```

The reason is that the closure's capture environment affects its type - we have a closure that captures two Fields and `foo`
expects a regular function as an argument - those are incompatible.
:::note

Variables contained within the `||` are the closure's parameters, and the expression that follows it is the closure's body. The capture environment is comprised of any variables used in the closure's body that are not parameters.

E.g. in `|x| x + y`, `y` would be a captured variable, but `x` would not be, since it is a parameter of the closure.

:::
The syntax for the type of a closure is `fn[env](args) -> ret_type`, where `env` is the capture environment of the closure -
in this example that's `(Field, Field)`.

The best solution in our case is to make `foo` generic over the environment type of its parameter, so that it can be called
with closures with any environment, as well as with regular functions:

```rust
fn foo<Env>(f: fn[Env]() -> Field) -> Field {
    f()
}

fn main() {
    let (x, y) = (50, 50);
    assert(foo(|| x + y) == 100); // compiles fine
    assert(foo(|| 60) == 60);     // compiles fine
}
```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/mutability.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/mutability.md
new file mode 100644
index 00000000000..fdeef6a87c5
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/mutability.md
@@ -0,0 +1,121 @@
---
title: Mutability
description:
  Learn about mutable variables in Noir. Discover how
  to declare, modify, and use them in your programs.
keywords: [noir programming language, mutability in noir, mutable variables]
sidebar_position: 8
---

Variables in Noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned
to via an assignment expression.

```rust
let x = 2;
x = 3; // error: x must be mutable to be assigned to

let mut y = 3;
let y = 4; // OK
```

The `mut` modifier can also apply to patterns:

```rust
let (a, mut b) = (1, 2);
a = 11; // error: a must be mutable to be assigned to
b = 12; // OK

let mut (c, d) = (3, 4);
c = 13; // OK
d = 14; // OK

// etc.
let MyStruct { x: mut y } = MyStruct { x: a };
// y is now in scope
```

Note that mutability in Noir is local and everything is passed by value, so if a called function
mutates its parameters then the parent function will keep the old value of the parameters.

```rust
fn main() -> pub Field {
    let x = 3;
    helper(x);
    x // x is still 3
}

fn helper(mut x: i32) {
    x = 4;
}
```

## Non-local mutability

Non-local mutability can be achieved through the mutable reference type `&mut T`:

```rust
fn set_to_zero(x: &mut Field) {
    *x = 0;
}

fn main() {
    let mut y = 42;
    set_to_zero(&mut y);
    assert(y == 0);
}
```

When creating a mutable reference, the original variable being referred to (`y` in this
example) must also be mutable. Since mutable references are a reference type, they must
be explicitly dereferenced via `*` to retrieve the underlying value. Note that this yields
a copy of the value, so mutating this copy will not change the original value behind the
reference:

```rust
fn main() {
    let mut x = 1;
    let x_ref = &mut x;

    let mut y = *x_ref;
    let y_ref = &mut y;

    x = 2;
    *x_ref = 3;

    y = 4;
    *y_ref = 5;

    assert(x == 3);
    assert(*x_ref == 3);
    assert(y == 5);
    assert(*y_ref == 5);
}
```

Note that types in Noir are actually deeply immutable so the copy that occurs when
dereferencing is only a conceptual copy - no additional constraints will occur.

Mutable references can also be stored within structs. Note that there is also
no lifetime parameter on these unlike Rust. This is because the allocated memory
always lasts the entire program - as if it were an array of one element.

```rust
struct Foo {
    x: &mut Field
}

impl Foo {
    fn incr(mut self) {
        *self.x += 1;
    }
}

fn main() {
    let foo = Foo { x: &mut 0 };
    foo.incr();
    assert(*foo.x == 1);
}
```

In general, you should avoid non-local & shared mutability unless it is needed. Sticking
to only local mutability will improve readability and potentially improve compiler optimizations as well.
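
As a loose illustration of that advice - a minimal sketch, not from the original docs - the same accumulation can usually be written with a local `mut` binding instead of threading a mutable reference through a helper:

```rust
// Local mutability: the accumulator never leaves this function.
fn sum_local(xs: [Field; 3]) -> Field {
    let mut total = 0;
    for i in 0..3 {
        total += xs[i];
    }
    total
}

// Shared mutability: works, but the helper hides the mutation.
fn add_into(total: &mut Field, x: Field) {
    *total += x;
}

fn sum_shared(xs: [Field; 3]) -> Field {
    let mut total = 0;
    for i in 0..3 {
        add_into(&mut total, xs[i]);
    }
    total
}
```

Both functions constrain the same result; the first simply keeps the mutation visible in one place.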
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/ops.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/ops.md new file mode 100644 index 00000000000..60425cb8994 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/ops.md @@ -0,0 +1,98 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. +keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +sidebar_position: 3 +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | +| >> | Right shift an integer by another integer amount | Types must be integer | +| ! | Bitwise not of a value | Type must be integer or boolean | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. +This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. + +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate identically for booleans, just without the +short-circuiting. 

```rust
let my_val = 5;

let mut flag = 1;
if (my_val > 6) | (my_val == 0) {
    flag = 0;
}
assert(flag == 1);

if (my_val != 10) & (my_val < 50) {
    flag = 0;
}
assert(flag == 0);
```

### Shorthand operators

Noir supports shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example:

```rust
let mut i = 0;
i = i + 1;
```

could be written as:

```rust
let mut i = 0;
i += 1;
```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/oracles.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/oracles.md
new file mode 100644
index 00000000000..aa380b5f7b8
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/oracles.md
@@ -0,0 +1,31 @@
---
title: Oracles
description: Dive into how Noir supports Oracles via RPC calls, and learn how to declare an Oracle in Noir with our comprehensive guide.
keywords:
  - Noir
  - Oracles
  - RPC Calls
  - Unconstrained Functions
  - Programming
  - Blockchain
sidebar_position: 6
---

:::note

This is an experimental feature that is not fully documented. If you notice any outdated information or potential improvements to this page, pull request contributions are very welcome: https://github.com/noir-lang/noir

:::

Noir has support for Oracles via RPC calls. This means Noir will make an RPC call and use the return value for proof generation.

Since Oracles are not resolved by Noir, they are [`unconstrained` functions](./unconstrained.md).

You can declare an Oracle through the `#[oracle(<name>)]` flag. Example:

```rust
#[oracle(get_number_sequence)]
unconstrained fn get_number_sequence(_size: Field) -> [Field] {}
```

The timeout for when using an external RPC oracle resolver can be set with the `NARGO_FOREIGN_CALL_TIMEOUT` environment variable. This timeout is in units of milliseconds.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/shadowing.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/shadowing.md
new file mode 100644
index 00000000000..5ce6130d201
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/shadowing.md
@@ -0,0 +1,44 @@
---
title: Shadowing
sidebar_position: 12
---

Noir allows for inheriting variables' values and re-declaring them with the same name, similar to Rust. This is known as shadowing.

For example, the following function is valid in Noir:

```rust
fn main() {
    let x = 5;

    {
        let x = x * 2;
        assert (x == 10);
    }

    assert (x == 5);
}
```

In this example, a variable `x` is first defined with the value 5.

The local scope that follows shadows the original `x`, i.e. creates a new local `x` based on the value of the original `x`. It is given a value of 2 times the original `x`.

When we return to the main scope, `x` once again refers to just the original `x`, which stays at the value of 5.

## Temporal mutability

One way that shadowing is useful, in addition to ergonomics across scopes, is for temporarily mutating variables.

```rust
fn main() {
    let age = 30;
    // age = age + 5; // Would error as `age` is immutable by default.

    let mut age = age + 5; // Temporarily mutates `age` with a new value.

    let age = age; // Locks `age`'s mutability again.

    assert (age == 35);
}
```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/traits.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/traits.md
new file mode 100644
index 00000000000..ef1445a5907
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/traits.md
@@ -0,0 +1,389 @@
---
title: Traits
description:
  Traits in Noir can be used to abstract out a common interface for functions across
  several data types.
keywords: [noir programming language, traits, interfaces, generic, protocol]
sidebar_position: 14
---

## Overview

Traits in Noir are a useful abstraction similar to interfaces or protocols in other languages. Each trait defines
the interface of several methods contained within the trait. Types can then implement this trait by providing
implementations for these methods. For example in the program:

```rust
struct Rectangle {
    width: Field,
    height: Field,
}

impl Rectangle {
    fn area(self) -> Field {
        self.width * self.height
    }
}

fn log_area(r: Rectangle) {
    println(r.area());
}
```

We have a function `log_area` to log the area of a `Rectangle`. Now how should we change the program if we want this
function to work on `Triangle`s as well?:

```rust
struct Triangle {
    width: Field,
    height: Field,
}

impl Triangle {
    fn area(self) -> Field {
        self.width * self.height / 2
    }
}
```

Making `log_area` generic over all types `T` would be invalid since not all types have an `area` method. Instead, we can
introduce a new `Area` trait and make `log_area` generic over all types `T` that implement `Area`:

```rust
trait Area {
    fn area(self) -> Field;
}

fn log_area<T>(shape: T) where T: Area {
    println(shape.area());
}
```

We also need to explicitly implement `Area` for `Rectangle` and `Triangle`. We can do that by changing their existing
impls slightly. Note that the parameter types and return type of each of our `area` methods must match those defined
by the `Area` trait.

```rust
impl Area for Rectangle {
    fn area(self) -> Field {
        self.width * self.height
    }
}

impl Area for Triangle {
    fn area(self) -> Field {
        self.width * self.height / 2
    }
}
```

Now we have a working program that is generic over any type of Shape that is used! Others can even use this program
as a library with their own types - such as `Circle` - as long as they also implement `Area` for these types.

## Where Clauses

As seen in `log_area` above, when we want to create a function or method that is generic over any type that implements
a trait, we can add a where clause to the generic function.

```rust
fn log_area<T>(shape: T) where T: Area {
    println(shape.area());
}
```

It is also possible to apply multiple trait constraints on the same variable at once by combining traits with the `+`
operator.
Similarly, we can have multiple trait constraints by separating each with a comma:

```rust
fn foo<T, U>(elements: [T], thing: U) where
    T: Default + Add + Eq,
    U: Bar,
{
    let mut sum = T::default();

    for element in elements {
        sum += element;
    }

    if sum == T::default() {
        thing.bar();
    }
}
```

## Generic Implementations

You can add generics to a trait implementation by adding the generic list after the `impl` keyword:

```rust
trait Second {
    fn second(self) -> Field;
}

impl<T> Second for (T, Field) {
    fn second(self) -> Field {
        self.1
    }
}
```

You can also implement a trait for every type this way:

```rust
trait Debug {
    fn debug(self);
}

impl<T> Debug for T {
    fn debug(self) {
        println(self);
    }
}

fn main() {
    1.debug();
}
```

### Generic Trait Implementations With Where Clauses

Where clauses can also be placed on trait implementations themselves to restrict generics in a similar way.
For example, while `impl<T> Foo for T` implements the trait `Foo` for every type, `impl<T> Foo for T where T: Bar`
will implement `Foo` only for types that also implement `Bar`. This is often used for implementing generic types.
For example, here is the implementation for array equality:

```rust
impl<T, N> Eq for [T; N] where T: Eq {
    // Test if two arrays have the same elements.
    // Because both arrays must have length N, we know their lengths already match.
    fn eq(self, other: Self) -> bool {
        let mut result = true;

        for i in 0 .. self.len() {
            // The T: Eq constraint is needed to call == on the array elements here
            result &= self[i] == other[i];
        }

        result
    }
}
```

## Generic Traits

Traits themselves can also be generic by placing the generic arguments after the trait name. These generics are in
scope of every item within the trait.

```rust
trait Into<T> {
    // Convert `self` to type `T`
    fn into(self) -> T;
}
```

When implementing generic traits the generic arguments of the trait must be specified. This is also true anytime
when referencing a generic trait (e.g. in a `where` clause).

```rust
struct MyStruct {
    array: [Field; 2],
}

impl Into<[Field; 2]> for MyStruct {
    fn into(self) -> [Field; 2] {
        self.array
    }
}

fn as_array<T>(x: T) -> [Field; 2]
    where T: Into<[Field; 2]>
{
    x.into()
}

fn main() {
    let array = [1, 2];
    let my_struct = MyStruct { array };

    assert_eq(as_array(my_struct), array);
}
```

## Trait Methods With No `self`

A trait can contain any number of methods, each of which have access to the `Self` type which represents each type
that eventually implements the trait. Similarly, the `self` variable is available as well but is not required to be used.
For example, we can define a trait to create a default value for a type. This trait will need to return the `Self` type
but doesn't need to take any parameters:

```rust
trait Default {
    fn default() -> Self;
}
```

Implementing this trait can be done similarly to any other trait:

```rust
impl Default for Field {
    fn default() -> Field {
        0
    }
}

struct MyType {}

impl Default for MyType {
    fn default() -> MyType {
        MyType {}
    }
}
```

However, since there is no `self` parameter, we cannot call it via the method call syntax `object.method()`.
Instead, we'll need to refer to the function directly. This can be done either by referring to the
specific impl `MyType::default()` or referring to the trait itself `Default::default()`. In the latter
case, type inference determines the impl that is selected.

```rust
let my_struct = MyStruct::default();

let x: Field = Default::default();
let result = x + Default::default();
```

:::warning

```rust
let _ = Default::default();
```

If type inference cannot select which impl to use because of an ambiguous `Self` type, an impl will be
arbitrarily selected. This occurs most often when the result of a trait function call with no parameters
is unused. To avoid this, when calling a trait function with no `self` or `Self` parameters or return type,
always refer to it via the implementation type's namespace - e.g. `MyType::default()`.
This is set to change to an error in future Noir versions.

:::

## Default Method Implementations

A trait can also have default implementations of its methods by giving a body to the desired functions.
Note that this body must be valid for all types that may implement the trait. As a result, the only
valid operations on `self` will be operations valid for any type or other operations on the trait itself.

```rust
trait Numeric {
    fn add(self, other: Self) -> Self;

    // Default implementation of double is (self + self)
    fn double(self) -> Self {
        self.add(self)
    }
}
```

When implementing a trait with default functions, a type may choose to implement only the required functions:

```rust
impl Numeric for Field {
    fn add(self, other: Field) -> Field {
        self + other
    }
}
```

Or it may implement the optional methods as well:

```rust
impl Numeric for u32 {
    fn add(self, other: u32) -> u32 {
        self + other
    }

    fn double(self) -> u32 {
        self * 2
    }
}
```

## Impl Specialization

When implementing traits for a generic type it is possible to implement the trait for only a certain combination
of generics. This can be either as an optimization or because those specific generics are required to implement the trait.

```rust
trait Sub {
    fn sub(self, other: Self) -> Self;
}

struct NonZero<T> {
    value: T,
}

impl Sub for NonZero<Field> {
    fn sub(self, other: Self) -> Self {
        let value = self.value - other.value;
        assert(value != 0);
        NonZero { value }
    }
}
```

## Overlapping Implementations

Overlapping implementations are disallowed by Noir to ensure Noir's decision on which impl to select is never ambiguous.
This means if a trait `Foo` is already implemented
by a type `Bar<T>` for all `T`, then we cannot also have a separate impl for `Bar<Field>` (or any other
type argument). Similarly, if there is an impl for all `T` such as `impl<T> Debug for T`, we cannot create
any more impls to `Debug` for other types since it would be ambiguous which impl to choose for any given
method call.

```rust
trait Trait {}

// Previous impl defined here
impl<A, B> Trait for (A, B) {}

// error: Impl for type `(Field, Field)` overlaps with existing impl
impl Trait for (Field, Field) {}
```

## Trait Coherence

Another restriction on trait implementations is coherence. This restriction ensures other crates cannot create
impls that may overlap with other impls, even if several unrelated crates are used as dependencies in the same
program.

The coherence restriction is: to implement a trait, either the trait itself or the object type must be declared
in the crate the impl is in.

In practice this often comes up when using types provided by libraries. If a library provides a type `Foo` that does
not implement a trait in the standard library such as `Default`, you may not `impl Default for Foo` in your own crate.

While restrictive, this prevents later issues or silent changes in the program if the `Foo` library later added its
own impl for `Default`. If you are a user of the `Foo` library in this scenario and need a trait not implemented by the
library your choices are to either submit a patch to the library or use the newtype pattern.

### The Newtype Pattern

The newtype pattern gets around the coherence restriction by creating a new wrapper type around the library type
that we cannot create `impl`s for. Since the new wrapper type is defined in our current crate, we can create
impls for any trait we need on it.

```rust
struct Wrapper {
    foo: dep::some_library::Foo,
}

impl Default for Wrapper {
    fn default() -> Wrapper {
        Wrapper {
            foo: dep::some_library::Foo::new(),
        }
    }
}
```

Since we have an impl for our own type, the behavior of this code will not change even if `some_library` is updated
to provide its own `impl Default for Foo`. The downside of this pattern is that it requires extra wrapping and
unwrapping of values when converting to and from the `Wrapper` and `Foo` types.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/unconstrained.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/unconstrained.md
new file mode 100644
index 00000000000..b8e71fe65f0
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/concepts/unconstrained.md
@@ -0,0 +1,99 @@
---
title: Unconstrained Functions
description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to."

keywords: [Noir programming language, unconstrained, open]
sidebar_position: 5
---

Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation.

## Why?

Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP-complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL.

Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit.

Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency.

A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit.

## Example

An in-depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord.

Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s.

```rust
fn main(num: u72) -> pub [u8; 8] {
    let mut out: [u8; 8] = [0; 8];
    for i in 0..8 {
        out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8;
    }

    out
}
```

```
Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91
Backend circuit size: 3619
```

A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the AND against 0xff. This saves us ~480 gates in total.

```rust
fn main(num: u72) -> pub [u8; 8] {
    let mut out: [u8; 8] = [0; 8];
    for i in 0..8 {
        out[i] = (num >> (56 - (i * 8))) as u8;
    }

    out
}
```

```
Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75
Backend circuit size: 3143
```

Those are some nice savings already but we can do better. This code is all constrained so we're proving every step of calculating `out` using `num`, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in.

It turns out that truncating a u72 into a u8 is hard to do inside a snark, each time we do `as u8` we lay down 4 ACIR opcodes which get converted into multiple gates. It's actually much easier to calculate `num` from `out` than the other way around. All we need to do is multiply each element of `out` by a constant and add them all together, both relatively easy operations inside a snark.

We can then run `u72_to_u8` as unconstrained brillig code in order to calculate `out`, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back `num`. This looks a little like the below:

```rust
fn main(num: u72) -> pub [u8; 8] {
    let out = u72_to_u8(num);

    let mut reconstructed_num: u72 = 0;
    for i in 0..8 {
        reconstructed_num += (out[i] as u72 << (56 - (8 * i)));
    }
    assert(num == reconstructed_num);
    out
}

unconstrained fn u72_to_u8(num: u72) -> [u8; 8] {
    let mut out: [u8; 8] = [0; 8];
    for i in 0..8 {
        out[i] = (num >> (56 - (i * 8))) as u8;
    }
    out
}
```

```
Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78
Backend circuit size: 2902
```

This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before but they're easier for the backend to prove (resulting in fewer gates).

Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number.

## Break and Continue

In addition to loops over runtime bounds, `break` and `continue` are also available in unconstrained code.
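
As a rough sketch - not from the original docs, with the function and values made up for illustration - exiting a loop early from an unconstrained function might look like:

```rust
// Hypothetical helper: scan for the first zero and stop early.
unconstrained fn first_zero_index(xs: [u32; 8]) -> u32 {
    let mut index = 8;
    for i in 0..8 {
        if xs[i] == 0 {
            index = i as u32; // remember the first match...
            break;            // ...and stop scanning early
        }
    }
    index
}
```

The same loop in constrained code would have to run all 8 iterations, since `break` is only permitted in unconstrained functions.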
See [break and continue](../concepts/control_flow/#break-and-continue).
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/_category_.json
new file mode 100644
index 00000000000..1debcfe7675
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/_category_.json
@@ -0,0 +1,6 @@
+{
+  "label": "Modules, Packages and Crates",
+  "position": 2,
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/crates_and_packages.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/crates_and_packages.md
new file mode 100644
index 00000000000..95ee9f52ab2
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/crates_and_packages.md
@@ -0,0 +1,43 @@
+---
+title: Crates and Packages
+description: Learn how to use Crates and Packages in your Noir project
+keywords: [Nargo, dependencies, package management, crates, package]
+sidebar_position: 0
+---
+
+## Crates
+
+A crate is the smallest amount of code that the Noir compiler considers at a time.
+Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections.
+
+### Crate Types
+
+A Noir crate can come in several forms: binaries, libraries or contracts.
+
+#### Binaries
+
+_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved.
+
+#### Libraries
+
+_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate.
+
+#### Contracts
+
+Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/noir-projects/noir-contracts/contracts).
+
+### Crate Root
+
+Every crate has a root, which is the source file that the compiler starts from; this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however, if you are compiling via Nargo, the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively.
+
+## Packages
+
+A Nargo _package_ is a collection of one or more crates that provides a set of functionality. A package must include a Nargo.toml file (a minimal example manifest is sketched at the end of this page).
+
+A package _must_ contain either a library or a binary crate, but not both.
+
+### Differences from Cargo Packages
+
+One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate.
+
+In the future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates.
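+
+For reference, a minimal manifest for a binary package might look like the following sketch (the field values here are illustrative, not prescriptive):
+
+```toml
+# Nargo.toml (illustrative values)
+[package]
+name = "my_program"
+type = "bin"
+authors = ["you"]
+
+[dependencies]
+```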
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/dependencies.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/dependencies.md
new file mode 100644
index 00000000000..04c1703d929
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/dependencies.md
@@ -0,0 +1,124 @@
+---
+title: Dependencies
+description:
+  Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub
+  and use them easily in your project.
+keywords: [Nargo, dependencies, GitHub, package management, versioning]
+sidebar_position: 1
+---
+
+Nargo allows you to upload packages to GitHub and use them as dependencies.
+
+## Specifying a dependency
+
+Specifying a dependency requires a tag pointing to a specific commit and the git URL of the repository containing
+the package.
+
+Currently, there are no requirements on the tag contents. If requirements are added, they would follow
+semver 2.0 guidelines.
+
+> Note: Without a `tag`, there would be no versioning and dependencies would change each time you
+> compile your project.
+
+For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`:
+
+```toml
+# Nargo.toml
+
+[dependencies]
+ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"}
+```
+
+If the module is in a subdirectory, you can define a subdirectory in your git repository, for example:
+
+```toml
+# Nargo.toml
+
+[dependencies]
+easy_private_token_contract = {tag = "v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "noir-contracts/contracts/easy_private_token_contract"}
+```
+
+## Specifying a local dependency
+
+You can also specify dependencies that are local to your machine.
+
+For example, this file structure has a library and binary crate:
+
+```tree
+├── binary_crate
+│   ├── Nargo.toml
+│   └── src
+│       └── main.nr
+└── lib_a
+    ├── Nargo.toml
+    └── src
+        └── lib.nr
+```
+
+Inside of the binary crate, you can specify:
+
+```toml
+# Nargo.toml
+
+[dependencies]
+lib_a = { path = "../lib_a" }
+```
+
+## Importing dependencies
+
+You can import a dependency to a Noir file using the following syntax. For example, to import the
+ecrecover-noir library and local lib_a referenced above:
+
+```rust
+use dep::ecrecover;
+use dep::lib_a;
+```
+
+You can also import only the specific parts of a dependency that you want to use, like so:
+
+```rust
+use dep::std::hash::sha256;
+use dep::std::scalar_mul::fixed_base_embedded_curve;
+```
+
+Lastly, as demonstrated in the
+[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you
+can import multiple items in the same line by enclosing them in curly braces:
+
+```rust
+use dep::std::ec::tecurve::affine::{Curve, Point};
+```
+
+We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now.
+
+Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml.
+
+## Dependencies of Dependencies
+
+Note that when you import a dependency, you also get access to all of the dependencies of that package.
+
+For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports a [fraction](https://github.com/resurgencelabs/fraction) library.
If you're importing the phy_vector library, then you can access the functions in the fraction library like so:
+
+```rust
+use dep::phy_vector;
+
+fn main(x : Field, y : pub Field) {
+  //...
+  let f = phy_vector::fraction::toFraction(true, 2, 1);
+  //...
+}
+```
+
+## Available Libraries
+
+Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries).
+
+Some libraries that are available today include:
+
+- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library
+- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys)
+- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers
+- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address
+- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees
+- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers in Noir
+- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing a fractional number data type in Noir, allowing results that aren't whole numbers
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/modules.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/modules.md
new file mode 100644
index 00000000000..ae822a1cff4
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/modules.md
@@ -0,0 +1,105 @@
+---
+title: Modules
+description:
+  Learn how to organize your files using modules in Noir, following the same convention as Rust's
+  module system. Examples included.
+keywords: [Noir, Rust, modules, organizing files, sub-modules]
+sidebar_position: 2
+---
+
+Noir's module system follows the same convention as the _newer_ version of Rust's module system.
+
+## Purpose of Modules
+
+Modules are used to organize files. Without modules, all of your code would need to live in a single
+file. In Noir, the compiler does not automatically scan all of your files to detect modules. This
+must be done explicitly by the developer.
+
+## Examples
+
+### Importing a module in the crate root
+
+Filename: `src/main.nr`
+
+```rust
+mod foo;
+
+fn main() {
+    foo::from_foo();
+}
+```
+
+Filename: `src/foo.nr`
+
+```rust
+fn from_foo() {}
+```
+
+In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module
+declaration `mod foo`, which prompts it to look for a `foo.nr` file.
+
+Visually this module hierarchy looks like the following:
+
+```
+crate
+ ├── main
+ │
+ └── foo
+     └── from_foo
+
+```
+
+### Importing a module throughout the tree
+
+All modules are accessible from the `crate::` namespace.
+
+```
+crate
+ ├── bar
+ ├── foo
+ └── main
+
+```
+
+In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`.
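+
+For example, reusing the `from_foo` function declared earlier, a minimal sketch of a `bar` module (this assumes `bar` has been declared with `mod bar;` in the crate root) could look like this:
+
+```rust
+// src/bar.nr
+use crate::foo::from_foo;
+
+fn from_bar() {
+    // Calls a function defined in the sibling `foo` module
+    from_foo();
+}
+```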
+
+### Sub-modules
+
+Filename: `src/main.nr`
+
+```rust
+mod foo;
+
+fn main() {
+    foo::from_foo();
+}
+```
+
+Filename: `src/foo.nr`
+
+```rust
+mod bar;
+fn from_foo() {}
+```
+
+Filename: `src/foo/bar.nr`
+
+```rust
+fn from_bar() {}
+```
+
+In the above snippet, we have added an extra module to the module tree: `bar`. `bar` is a submodule
+of `foo`, hence we declare `bar` in `foo.nr` with `mod bar`. Since `foo` is not the crate root, the
+compiler looks for the file associated with the `bar` module in `src/foo/bar.nr`.
+
+Visually the module hierarchy looks as follows:
+
+```
+crate
+ ├── main
+ │
+ └── foo
+     ├── from_foo
+     └── bar
+         └── from_bar
+```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/workspaces.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/workspaces.md
new file mode 100644
index 00000000000..513497f12bf
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/modules_packages_crates/workspaces.md
@@ -0,0 +1,42 @@
+---
+title: Workspaces
+sidebar_position: 3
+---
+
+Workspaces are a feature of nargo that allow you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations.
+
+Each Noir project (with its own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`.
+
+For a project with the following structure:
+
+```tree
+├── crates
+│   ├── a
+│   │   ├── Nargo.toml
+│   │   ├── Prover.toml
+│   │   └── src
+│   │       └── main.nr
+│   └── b
+│       ├── Nargo.toml
+│       ├── Prover.toml
+│       └── src
+│           └── main.nr
+│
+└── Nargo.toml
+```
+
+You can define a workspace in Nargo.toml like so:
+
+```toml
+[workspace]
+members = ["crates/a", "crates/b"]
+default-member = "crates/a"
+```
+
+`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified.
+
+`default-member` indicates which package various commands process by default.
+
+Libraries can be defined in a workspace. Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml.
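+
+For example, with the layout above, you could process a single member or every member of the workspace like so (a sketch; only the `--workspace` flag is taken from the description above, the exact commands are illustrative):
+
+```
+# Processes crates/a, the default member
+nargo compile
+
+# Processes every member of the workspace
+nargo compile --workspace
+```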
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/_category_.json
new file mode 100644
index 00000000000..af04c0933fd
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/_category_.json
@@ -0,0 +1,6 @@
+{
+  "label": "Standard Library",
+  "position": 1,
+  "collapsible": true,
+  "collapsed": true
+}
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/bigint.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/bigint.md
new file mode 100644
index 00000000000..2bfdeec6631
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/bigint.md
@@ -0,0 +1,122 @@
+---
+title: Big Integers
+description: How to use big integers from Noir standard library
+keywords:
+  [
+    Big Integer,
+    Noir programming language,
+    Noir libraries,
+  ]
+---
+
+The BigInt module in the standard library exposes some classes of integers which do not fit (well) into a Noir native field. It implements modular arithmetic modulo a 'big' prime number.
+
+:::note
+
+The module can currently be considered as `Field`s with fixed modulo sizes used by a set of elliptic curves, in addition to just the native curve. [More work](https://github.com/noir-lang/noir/issues/510) is needed to achieve arbitrarily sized big integers.
+
+:::
+
+Currently, 6 classes of integers (i.e. 'big' prime numbers) are available in the module, namely:
+
+- BN254 Fq: Bn254Fq
+- BN254 Fr: Bn254Fr
+- Secp256k1 Fq: Secpk1Fq
+- Secp256k1 Fr: Secpk1Fr
+- Secp256r1 Fr: Secpr1Fr
+- Secp256r1 Fq: Secpr1Fq
+
+Where XXX Fq and XXX Fr denote, respectively, the order of the base and scalar fields of the (usual) elliptic curve XXX.
+For instance the big integer 'Secpk1Fq' in the standard library refers to integers modulo $2^{256}-2^{32}-977$.
+
+Feel free to explore the source code for the other primes:
+
+```rust title="big_int_definition" showLineNumbers
+struct BigInt {
+    pointer: u32,
+    modulus: u32,
+}
+```
+> Source code: noir_stdlib/src/bigint.nr#L14-L19
+
+
+## Example usage
+
+A common use-case is when constructing a big integer from its bytes representation, and performing arithmetic operations on it:
+
+```rust title="big_int_example" showLineNumbers
+fn big_int_example(x: u8, y: u8) {
+    let a = Secpk1Fq::from_le_bytes(&[x, y, 0, 45, 2]);
+    let b = Secpk1Fq::from_le_bytes(&[y, x, 9]);
+    let c = (a + b) * b / a;
+    let d = c.to_le_bytes();
+    println(d[0]);
+}
+```
+> Source code: test_programs/execution_success/bigint/src/main.nr#L70-L78
+
+
+## Methods
+
+The available operations for each big integer are:
+
+### from_le_bytes
+
+Construct a big integer from its little-endian bytes representation. Example:
+
+```rust
+ // Construct a big integer from a slice of bytes
+ let a = Secpk1Fq::from_le_bytes(&[x, y, 0, 45, 2]);
+ // Construct a big integer from an array of 32 bytes
+ let a = Secpk1Fq::from_le_bytes_32([1;32]);
+ ```
+
+### to_le_bytes
+
+Return the little-endian bytes representation of a big integer. Example:
+
+```rust
+let bytes = a.to_le_bytes();
+```
+
+### add
+
+Add two big integers. Example:
+
+```rust
+let sum = a + b;
+```
+
+### sub
+
+Subtract two big integers. Example:
+
+```rust
+let difference = a - b;
+```
+
+### mul
+
+Multiply two big integers.
Example:
+
+```rust
+let product = a * b;
+```
+
+### div
+
+Divide two big integers. Note that division is field division and not Euclidean division. Example:
+
+```rust
+let quotient = a / b;
+```
+
+### eq
+
+Compare two big integers. Example:
+
+```rust
+let are_equal = a == b;
+```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/black_box_fns.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/black_box_fns.md
new file mode 100644
index 00000000000..be8c65679c3
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/black_box_fns.md
@@ -0,0 +1,31 @@
+---
+title: Black Box Functions
+description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints.
+keywords: [noir, black box functions]
+---
+
+Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-SNARK-unfriendly computations cheaper than if they were implemented in Noir.
+
+The ACVM spec defines a set of blackbox functions which backends will be expected to implement. This allows backends to use optimized implementations of these constraints if they have them; however, they may also fall back to less efficient naive implementations if not.
+
+## Function list
+
+Here is a list of the current black box functions:
+
+- [SHA256](./cryptographic_primitives/hashes.mdx#sha256)
+- [Schnorr signature verification](./cryptographic_primitives/schnorr.mdx)
+- [Blake2s](./cryptographic_primitives/hashes.mdx#blake2s)
+- [Blake3](./cryptographic_primitives/hashes.mdx#blake3)
+- [Pedersen Hash](./cryptographic_primitives/hashes.mdx#pedersen_hash)
+- [Pedersen Commitment](./cryptographic_primitives/hashes.mdx#pedersen_commitment)
+- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification.mdx)
+- [Fixed base scalar multiplication](./cryptographic_primitives/scalar.mdx)
+- AND
+- XOR
+- RANGE
+- [Keccak256](./cryptographic_primitives/hashes.mdx#keccak256)
+- [Recursive proof verification](./recursion)
+
+Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function.
+
+You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs).
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/bn254.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/bn254.md
new file mode 100644
index 00000000000..3294f005dbb
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/bn254.md
@@ -0,0 +1,46 @@
+---
+title: Bn254 Field Library
+---
+
+Noir provides a module in the standard library with some optimized functions for bn254 Fr in `std::field::bn254`.
+
+## decompose
+
+```rust
+fn decompose(x: Field) -> (Field, Field) {}
+```
+
+Decomposes a single field into two fields, low and high. The low field contains the lower 16 bytes of the input field and the high field contains the upper 16 bytes of the input field. Both field results are range checked to 128 bits.
+
+
+## assert_gt
+
+```rust
+fn assert_gt(a: Field, b: Field) {}
+```
+
+Asserts that a > b. This will generate fewer constraints than using `assert(gt(a, b))`.
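+
+For example, a circuit could enforce an ordering between two witnesses like so (a minimal sketch):
+
+```rust
+use dep::std::field::bn254::assert_gt;
+
+fn main(a: Field, b: Field) {
+    // Constrains a > b using the specialized bn254 comparison
+    assert_gt(a, b);
+}
+```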
+
+## assert_lt
+
+```rust
+fn assert_lt(a: Field, b: Field) {}
+```
+
+Asserts that a < b. This will generate fewer constraints than using `assert(lt(a, b))`.
+
+## gt
+
+```rust
+fn gt(a: Field, b: Field) -> bool {}
+```
+
+Returns true if a > b.
+
+## lt
+
+```rust
+fn lt(a: Field, b: Field) -> bool {}
+```
+
+Returns true if a < b.
\ No newline at end of file
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/boundedvec.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/boundedvec.md
new file mode 100644
index 00000000000..ce4529f6e57
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/boundedvec.md
@@ -0,0 +1,326 @@
+---
+title: Bounded Vectors
+keywords: [noir, vector, bounded vector, slice]
+sidebar_position: 1
+---
+
+A `BoundedVec<T, MaxLen>` is a growable storage similar to a `Vec<T>` except that it
+is bounded with a maximum possible length. Unlike `Vec`, `BoundedVec` is not implemented
+via slices and thus is not subject to the same restrictions slices are (notably, nested
+slices - and thus nested vectors as well - are disallowed).
+
+Since a `BoundedVec` is backed by a normal array under the hood, growing the `BoundedVec` by
+pushing an additional element is also more efficient - the length only needs to be increased
+by one.
+
+For these reasons `BoundedVec<T, MaxLen>` should generally be preferred over `Vec<T>` when there
+is a reasonable maximum bound that can be placed on the vector.
+
+Example:
+
+```rust
+let mut vector: BoundedVec<u32, 10> = BoundedVec::new();
+for i in 0..5 {
+    vector.push(i);
+}
+assert(vector.len() == 5);
+assert(vector.max_len() == 10);
+```
+
+## Methods
+
+### new
+
+```rust
+pub fn new() -> Self
+```
+
+Creates a new, empty vector of length zero.
+
+Since this container is backed by an array internally, it still needs an initial value
+to give each element. To resolve this, each element is zeroed internally. This value
+is guaranteed to be inaccessible unless `get_unchecked` is used.
+
+Example:
+
+```rust
+let empty_vector: BoundedVec<Field, 10> = BoundedVec::new();
+assert(empty_vector.len() == 0);
+```
+
+Note that whenever calling `new` the maximum length of the vector should always be specified
+via a type signature:
+
+```rust title="new_example" showLineNumbers
+fn foo() -> BoundedVec<Field, 10> {
+    // Ok! MaxLen is specified with a type annotation
+    let v1: BoundedVec<Field, 3> = BoundedVec::new();
+    let v2 = BoundedVec::new();
+
+    // Ok! MaxLen is known from the type of foo's return value
+    v2
+}
+
+fn bad() {
+    let mut v3 = BoundedVec::new();
+
+    // Not Ok! We don't know if v3's MaxLen is at least 1, and the compiler often infers 0 by default.
+    v3.push(5);
+}
+```
+> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L11-L27
+
+
+This defaulting of `MaxLen` (and numeric generics in general) to zero may change in future Noir versions,
+but for now make sure to use type annotations when using bounded vectors. Otherwise, you will receive a constraint failure at runtime when the vec is pushed to.
+
+### get
+
+```rust
+pub fn get(mut self: Self, index: u64) -> T {
+```
+
+Retrieves an element from the vector at the given index, starting from zero.
+
+If the given index is equal to or greater than the length of the vector, this
+will issue a constraint failure.
+
+Example:
+
+```rust
+fn foo<N>(v: BoundedVec<u32, N>) {
+    let first = v.get(0);
+    let last = v.get(v.len() - 1);
+    assert(first != last);
+}
+```
+
+### get_unchecked
+
+```rust
+pub fn get_unchecked(mut self: Self, index: u64) -> T {
+```
+
+Retrieves an element from the vector at the given index, starting from zero, without
+performing a bounds check.
+
+Since this function does not perform a bounds check on length before accessing the element,
+it is unsafe! Use at your own risk!
+
+Example:
+
+```rust title="get_unchecked_example" showLineNumbers
+fn sum_of_first_three<N>(v: BoundedVec<u32, N>) -> u32 {
+    // Always ensure the length is larger than the largest
+    // index passed to get_unchecked
+    assert(v.len() > 2);
+    let first = v.get_unchecked(0);
+    let second = v.get_unchecked(1);
+    let third = v.get_unchecked(2);
+    first + second + third
+}
+```
+> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L54-L64
+
+
+
+### push
+
+```rust
+pub fn push(&mut self, elem: T) {
+```
+
+Pushes an element to the end of the vector. This increases the length
+of the vector by one.
+
+Panics if the new length of the vector will be greater than the max length.
+
+Example:
+
+```rust title="bounded-vec-push-example" showLineNumbers
+let mut v: BoundedVec<Field, 2> = BoundedVec::new();
+
+    v.push(1);
+    v.push(2);
+
+    // Panics with failed assertion "push out of bounds"
+    v.push(3);
+```
+> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L68-L76
+
+
+### pop
+
+```rust
+pub fn pop(&mut self) -> T
+```
+
+Pops the element at the end of the vector. This will decrease the length
+of the vector by one.
+
+Panics if the vector is empty.
+
+Example:
+
+```rust title="bounded-vec-pop-example" showLineNumbers
+let mut v: BoundedVec<Field, 2> = BoundedVec::new();
+    v.push(1);
+    v.push(2);
+
+    let two = v.pop();
+    let one = v.pop();
+
+    assert(two == 2);
+    assert(one == 1);
+    // error: cannot pop from an empty vector
+    // let _ = v.pop();
+```
+> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L81-L93
+
+
+### len
+
+```rust
+pub fn len(self) -> u64 {
+```
+
+Returns the current length of this vector.
+
+Example:
+
+```rust title="bounded-vec-len-example" showLineNumbers
+let mut v: BoundedVec<Field, 4> = BoundedVec::new();
+    assert(v.len() == 0);
+
+    v.push(100);
+    assert(v.len() == 1);
+
+    v.push(200);
+    v.push(300);
+    v.push(400);
+    assert(v.len() == 4);
+
+    let _ = v.pop();
+    let _ = v.pop();
+    assert(v.len() == 2);
+```
+> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L98-L113
+
+
+### max_len
+
+```rust
+pub fn max_len(_self: BoundedVec<T, MaxLen>) -> u64 {
+```
+
+Returns the maximum length of this vector. This is always
+equal to the `MaxLen` parameter this vector was initialized with.
+
+Example:
+
+```rust title="bounded-vec-max-len-example" showLineNumbers
+let mut v: BoundedVec<Field, 5> = BoundedVec::new();
+
+    assert(v.max_len() == 5);
+    v.push(10);
+    assert(v.max_len() == 5);
+```
+> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L118-L124
+
+
+### storage
+
+```rust
+pub fn storage(self) -> [T; MaxLen] {
+```
+
+Returns the internal array within this vector.
+Since arrays in Noir are immutable, mutating the returned storage array will not mutate
+the storage held internally by this vector.
+
+Note that uninitialized elements may be zeroed out!
+
+Example:
+
+```rust title="bounded-vec-storage-example" showLineNumbers
+let mut v: BoundedVec<Field, 5> = BoundedVec::new();
+
+    assert(v.storage() == [0, 0, 0, 0, 0]);
+
+    v.push(57);
+    assert(v.storage() == [57, 0, 0, 0, 0]);
+```
+> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L129-L136
+
+
+### extend_from_array
+
+```rust
+pub fn extend_from_array<Len>(&mut self, array: [T; Len])
+```
+
+Pushes each element from the given array to this vector.
+
+Panics if pushing each element would cause the length of this vector
+to exceed the maximum length.
+
+Example:
+
+```rust title="bounded-vec-extend-from-array-example" showLineNumbers
+let mut vec: BoundedVec<Field, 3> = BoundedVec::new();
+    vec.extend_from_array([2, 4]);
+
+    assert(vec.len == 2);
+    assert(vec.get(0) == 2);
+    assert(vec.get(1) == 4);
+```
+> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L141-L148
+
+
+### extend_from_bounded_vec
+
+```rust
+pub fn extend_from_bounded_vec<Len>(&mut self, vec: BoundedVec<T, Len>)
+```
+
+Pushes each element from the other vector to this vector. The length of
+the other vector is left unchanged.
+
+Panics if pushing each element would cause the length of this vector
+to exceed the maximum length.
+
+Example:
+
+```rust title="bounded-vec-extend-from-bounded-vec-example" showLineNumbers
+let mut v1: BoundedVec<Field, 5> = BoundedVec::new();
+    let mut v2: BoundedVec<Field, 7> = BoundedVec::new();
+
+    v2.extend_from_array([1, 2, 3]);
+    v1.extend_from_bounded_vec(v2);
+
+    assert(v1.storage() == [1, 2, 3, 0, 0]);
+    assert(v2.storage() == [1, 2, 3, 0, 0, 0, 0]);
+```
+> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L153-L162
+
+
+### any
+
+```rust
+pub fn any(self, predicate: fn[Env](T) -> bool) -> bool
+```
+
+Returns true if the given predicate returns true for any element
+in this vector.
+
+Example:
+
+```rust title="bounded-vec-any-example" showLineNumbers
+let mut v: BoundedVec<u32, 3> = BoundedVec::new();
+    v.extend_from_array([2, 4, 6]);
+
+    let all_even = !v.any(|elem: u32| elem % 2 != 0);
+    assert(all_even);
+```
+> Source code: test_programs/noir_test_success/bounded_vec/src/main.nr#L229-L235
+
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/hashmap.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/hashmap.md
new file mode 100644
index 00000000000..47faa99aba6
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/hashmap.md
@@ -0,0 +1,570 @@
+---
+title: HashMap
+keywords: [noir, map, hash, hashmap]
+sidebar_position: 1
+---
+
+`HashMap<Key, Value, MaxLen, Hasher>` is used to efficiently store and look up key-value pairs.
+
+`HashMap` is a bounded type which can store anywhere from zero to `MaxLen` total elements.
+Note that due to hash collisions, the actual maximum number of elements stored by any particular
+hashmap is likely lower than `MaxLen`. This is true even with cryptographic hash functions since
+every hash value is reduced modulo `MaxLen`.
+
+When creating `HashMap`s, the `MaxLen` generic should always be specified if it is not already
+known. Otherwise, the compiler may infer a different value for `MaxLen` (such as zero), which
+will likely change the result of the program. This behavior is set to become an error in future
+versions instead.
+
+Example:
+
+```rust
+// Create a mapping from Fields to u32s with a maximum length of 12
+// using a poseidon2 hasher
+use dep::std::hash::poseidon2::Poseidon2Hasher;
+let mut map: HashMap<Field, u32, 12, BuildHasherDefault<Poseidon2Hasher>> = HashMap::default();
+
+map.insert(1, 2);
+map.insert(3, 4);
+
+let two = map.get(1).unwrap();
+```
+
+## Methods
+
+### default
+
+```rust title="default" showLineNumbers
+impl<K, V, N, B, H> Default for HashMap<K, V, N, B>
+where
+    B: BuildHasher<H> + Default,
+    H: Hasher + Default
+{
+    fn default() -> Self {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L462-L469
+
+
+Creates a fresh, empty HashMap.
+
+When using this function, always make sure to specify the maximum size of the hash map.
+
+This is the same `default` from the `Default` implementation given further below. It is
+repeated here for convenience since it is the recommended way to create a hashmap.
+
+Example:
+
+```rust title="default_example" showLineNumbers
+let hashmap: HashMap<u8, u32, 10, BuildHasherDefault<Poseidon2Hasher>> = HashMap::default();
+    assert(hashmap.is_empty());
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L202-L205
+
+
+Because `HashMap` has so many generic arguments that are likely to be the same throughout
+your program, it may be helpful to create a type alias:
+
+```rust title="type_alias" showLineNumbers
+type MyMap = HashMap<u8, u32, 10, BuildHasherDefault<Poseidon2Hasher>>;
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L196-L198
+
+
+### with_hasher
+
+```rust title="with_hasher" showLineNumbers
+pub fn with_hasher(_build_hasher: B) -> Self
+    where
+        B: BuildHasher<H> {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L82-L86
+
+
+Creates a hashmap with an existing `BuildHasher`. This can be used to ensure multiple
+hashmaps are created with the same hasher instance.
+
+Example:
+
+```rust title="with_hasher_example" showLineNumbers
+let my_hasher: BuildHasherDefault<Poseidon2Hasher> = Default::default();
+    let hashmap: HashMap<u8, u32, 10, BuildHasherDefault<Poseidon2Hasher>> = HashMap::with_hasher(my_hasher);
+    assert(hashmap.is_empty());
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L207-L211
+
+
+### get
+
+```rust title="get" showLineNumbers
+pub fn get(
+        self,
+        key: K
+    ) -> Option<V>
+    where
+        K: Eq + Hash,
+        B: BuildHasher<H>,
+        H: Hasher {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L278-L287
+
+
+Retrieves a value from the hashmap, returning `Option::none()` if it was not found.
+
+Example:
+
+```rust title="get_example" showLineNumbers
+fn get_example(map: HashMap<Field, Field, 5, BuildHasherDefault<Poseidon2Hasher>>) {
+    let x = map.get(12);
+
+    if x.is_some() {
+        assert(x.unwrap() == 42);
+    }
+}
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L299-L307
+
+
+### insert
+
+```rust title="insert" showLineNumbers
+pub fn insert(
+        &mut self,
+        key: K,
+        value: V
+    )
+    where
+        K: Eq + Hash,
+        B: BuildHasher<H>,
+        H: Hasher {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L313-L323
+
+
+Inserts a new key-value pair into the map. If the key was already in the map, its
+previous value will be overridden with the newly provided one.
+
+Example:
+
+```rust title="insert_example" showLineNumbers
+let mut map: HashMap<Field, Field, 5, BuildHasherDefault<Poseidon2Hasher>> = HashMap::default();
+    map.insert(12, 42);
+    assert(map.len() == 1);
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L213-L217
+
+
+### remove
+
+```rust title="remove" showLineNumbers
+pub fn remove(
+        &mut self,
+        key: K
+    )
+    where
+        K: Eq + Hash,
+        B: BuildHasher<H>,
+        H: Hasher {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L356-L365
+
+
+Removes the given key-value pair from the map. If the key was not already present
+in the map, this does nothing.
+
+Example:
+
+```rust title="remove_example" showLineNumbers
+map.remove(12);
+    assert(map.is_empty());
+
+    // If a key was not present in the map, remove does nothing
+    map.remove(12);
+    assert(map.is_empty());
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L221-L228
+
+
+### is_empty
+
+```rust title="is_empty" showLineNumbers
+pub fn is_empty(self) -> bool {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L115-L117
+
+
+True if the length of the hash map is zero.
+
+Example:
+
+```rust title="is_empty_example" showLineNumbers
+assert(map.is_empty());
+
+    map.insert(1, 2);
+    assert(!map.is_empty());
+
+    map.remove(1);
+    assert(map.is_empty());
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L230-L238
+
+
+### len
+
+```rust title="len" showLineNumbers
+pub fn len(self) -> u64 {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L264-L266
+
+
+Returns the current length of this hash map.
+
+Example:
+
+```rust title="len_example" showLineNumbers
+// This is equivalent to checking map.is_empty()
+    assert(map.len() == 0);
+
+    map.insert(1, 2);
+    map.insert(3, 4);
+    map.insert(5, 6);
+    assert(map.len() == 3);
+
+    // 3 was already present as a key in the hash map, so the length is unchanged
+    map.insert(3, 7);
+    assert(map.len() == 3);
+
+    map.remove(1);
+    assert(map.len() == 2);
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L240-L255
+
+
+### capacity
+
+```rust title="capacity" showLineNumbers
+pub fn capacity(_self: Self) -> u64 {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L271-L273
+
+
+Returns the maximum capacity of this hashmap. This is always equal to the capacity
+specified in the hashmap's type.
+
+Unlike hashmaps in general purpose programming languages, hashmaps in Noir have a
+static capacity that does not increase as the map grows larger. Thus, this capacity
+is also the maximum possible element count that can be inserted into the hashmap.
+Due to hash collisions (modulo the hashmap length), it is likely the actual maximum
+element count will be lower than the full capacity.
+
+Example:
+
+```rust title="capacity_example" showLineNumbers
+let empty_map: HashMap<Field, Field, 42, BuildHasherDefault<Poseidon2Hasher>> = HashMap::default();
+    assert(empty_map.len() == 0);
+    assert(empty_map.capacity() == 42);
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L257-L261
+
+
+### clear
+
+```rust title="clear" showLineNumbers
+pub fn clear(&mut self) {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L93-L95
+
+
+Clears the hashmap, removing all key-value pairs from it.
+
+Example:
+
+```rust title="clear_example" showLineNumbers
+assert(!map.is_empty());
+    map.clear();
+    assert(map.is_empty());
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L263-L267
+
+
+### contains_key
+
+```rust title="contains_key" showLineNumbers
+pub fn contains_key(
+        self,
+        key: K
+    ) -> bool
+    where
+        K: Hash + Eq,
+        B: BuildHasher<H>,
+        H: Hasher {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L101-L110
+
+
+True if the hashmap contains the given key. Unlike `get`, this will not also return
+the value associated with the key.
+
+Example:
+
+```rust title="contains_key_example" showLineNumbers
+if map.contains_key(7) {
+        let value = map.get(7);
+        assert(value.is_some());
+    } else {
+        println("No value for key 7!");
+    }
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L269-L276
+
+
+### entries
+
+```rust title="entries" showLineNumbers
+pub fn entries(self) -> BoundedVec<(K, V), N> {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L123-L125
+
+
+Returns a vector of each key-value pair present in the hashmap.
+
+The length of the returned vector is always equal to the length of the hashmap.
+
+Example:
+
+```rust title="entries_example" showLineNumbers
+let entries = map.entries();
+
+    // The length of a hashmap may not be compile-time known, so we
+    // need to loop over its capacity instead
+    for i in 0..map.capacity() {
+        if i < entries.len() {
+            let (key, value) = entries.get(i);
+            println(f"{key} -> {value}");
+        }
+    }
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L310-L321
+
+
+### keys
+
+```rust title="keys" showLineNumbers
+pub fn keys(self) -> BoundedVec<K, N> {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L144-L146
+
+
+Returns a vector of each key present in the hashmap.
+
+The length of the returned vector is always equal to the length of the hashmap.
+
+Example:
+
+```rust title="keys_example" showLineNumbers
+let keys = map.keys();
+
+    for i in 0..keys.max_len() {
+        if i < keys.len() {
+            let key = keys.get_unchecked(i);
+            let value = map.get(key).unwrap_unchecked();
+            println(f"{key} -> {value}");
+        }
+    }
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L323-L333
+
+
+### values
+
+```rust title="values" showLineNumbers
+pub fn values(self) -> BoundedVec<V, N> {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L164-L166
+
+
+Returns a vector of each value present in the hashmap.
+
+The length of the returned vector is always equal to the length of the hashmap.
+
+Example:
+
+```rust title="values_example" showLineNumbers
+let values = map.values();
+
+    for i in 0..values.max_len() {
+        if i < values.len() {
+            let value = values.get_unchecked(i);
+            println(f"Found value {value}");
+        }
+    }
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L335-L344
+
+
+### iter_mut
+
+```rust title="iter_mut" showLineNumbers
+pub fn iter_mut(
+        &mut self,
+        f: fn(K, V) -> (K, V)
+    )
+    where
+        K: Eq + Hash,
+        B: BuildHasher<H>,
+        H: Hasher {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L183-L192
+
+
+Iterates through each key-value pair of the HashMap, setting each key-value pair to the
+result returned from the given function.
+
+Note that since keys can be mutated, the HashMap needs to be rebuilt as it is iterated
+through. If this is not desired, use `iter_values_mut` if only values need to be mutated,
+or `entries` if neither keys nor values need to be mutated.
+
+The iteration order is left unspecified. As a result, if two keys are mutated to become
+equal, which of the two values will be present for the key in the resulting map is also unspecified.
+
+Example:
+
+```rust title="iter_mut_example" showLineNumbers
+// Add 1 to each key in the map, and double the value associated with that key.
+    map.iter_mut(|k, v| (k + 1, v * 2));
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L348-L351
+
+
+### iter_keys_mut
+
+```rust title="iter_keys_mut" showLineNumbers
+pub fn iter_keys_mut(
+        &mut self,
+        f: fn(K) -> K
+    )
+    where
+        K: Eq + Hash,
+        B: BuildHasher<H>,
+        H: Hasher {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L208-L217
+
+
+Iterates through the HashMap, mutating each key to the result returned from
+the given function.
+
+Note that since keys can be mutated, the HashMap needs to be rebuilt as it is iterated
+through. If only iteration is desired and the keys are not intended to be mutated,
+prefer using `entries` instead.
+
+The iteration order is left unspecified. As a result, if two keys are mutated to become
+equal, which of the two values will be present for the key in the resulting map is also unspecified.
+
+Example:
+
+```rust title="iter_keys_mut_example" showLineNumbers
+// Double each key, leaving the value associated with that key untouched
+    map.iter_keys_mut(|k| k * 2);
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L353-L356
+
+
+### iter_values_mut
+
+```rust title="iter_values_mut" showLineNumbers
+pub fn iter_values_mut(&mut self, f: fn(V) -> V) {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L233-L235
+
+
+Iterates through the HashMap, applying the given function to each value and mutating the
+value to equal the result. This function is more efficient than `iter_mut` and `iter_keys_mut`
+because the keys are untouched and the underlying hashmap thus does not need to be reordered.
+
+Example:
+
+```rust title="iter_values_mut_example" showLineNumbers
+// Halve each value
+    map.iter_values_mut(|v| v / 2);
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L358-L361
+
+
+### retain
+
+```rust title="retain" showLineNumbers
+pub fn retain(&mut self, f: fn(K, V) -> bool) {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L247-L249
+
+
+Retains only the key-value pairs for which the given function returns true.
+Any key-value pairs for which the function returns false will be removed from the map.
+
+Example:
+
+```rust title="retain_example" showLineNumbers
+map.retain(|k, v| (k != 0) & (v != 0));
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L281-L283
+
+
+## Trait Implementations
+
+### default
+
+```rust title="default" showLineNumbers
+impl<K, V, N, B, H> Default for HashMap<K, V, N, B>
+where
+    B: BuildHasher<H> + Default,
+    H: Hasher + Default
+{
+    fn default() -> Self {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L462-L469
+
+
+Constructs an empty HashMap.
+
+Example:
+
+```rust title="default_example" showLineNumbers
+let hashmap: HashMap<u8, u32, 10, BuildHasherDefault<Poseidon2Hasher>> = HashMap::default();
+    assert(hashmap.is_empty());
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L202-L205
+
+
+### eq
+
+```rust title="eq" showLineNumbers
+impl<K, V, N, B, H> Eq for HashMap<K, V, N, B>
+where
+    K: Eq + Hash,
+    V: Eq,
+    B: BuildHasher<H>,
+    H: Hasher
+{
+    fn eq(self, other: HashMap<K, V, N, B>) -> bool {
+```
+> Source code: noir_stdlib/src/collections/map.nr#L426-L435
+
+
+Checks if two HashMaps are equal.
+
+Example:
+
+```rust title="eq_example" showLineNumbers
+let mut map1: HashMap<Field, Field, 4, BuildHasherDefault<Poseidon2Hasher>> = HashMap::default();
+    let mut map2: HashMap<Field, Field, 4, BuildHasherDefault<Poseidon2Hasher>> = HashMap::default();
+
+    map1.insert(1, 2);
+    map1.insert(3, 4);
+
+    map2.insert(3, 4);
+    map2.insert(1, 2);
+
+    assert(map1 == map2);
+```
+> Source code: test_programs/execution_success/hashmap/src/main.nr#L285-L296
+
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/index.md
new file mode 100644
index 00000000000..ea84c6d5c21
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/index.md
@@ -0,0 +1,5 @@
+---
+title: Containers
+description: Container types provided by Noir's standard library for storing and retrieving data
+keywords: [containers, data types, vec, hashmap]
+---
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/vec.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/vec.mdx
new file mode 100644
index 00000000000..fcfd7e07aa0
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/containers/vec.mdx
@@ -0,0 +1,151 @@
+---
+title: Vectors
+description: Delve into the Vec data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code.
+keywords: [noir, vector type, methods, examples, dynamic arrays]
+sidebar_position: 6
+---
+
+import Experimental from '@site/src/components/Notes/_experimental.mdx';
+
+<Experimental />
+
+A vector is a collection type similar to Rust's `Vec<T>` type. In Noir, it is a convenient way to use slices as mutable arrays.
+
+Example:
+
+```rust
+let mut vector: Vec<u32> = Vec::new();
+for i in 0..5 {
+    vector.push(i);
+}
+assert(vector.len() == 5);
+```
+
+## Methods
+
+### new
+
+Creates a new, empty vector.
+
+```rust
+pub fn new() -> Self
+```
+
+Example:
+
+```rust
+let empty_vector: Vec<u32> = Vec::new();
+assert(empty_vector.len() == 0);
+```
+
+### from_slice
+
+Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice.
+
+```rust
+pub fn from_slice(slice: [T]) -> Self
+```
+
+Example:
+
+```rust
+let slice: [Field] = &[1, 2, 3];
+let vector_from_slice = Vec::from_slice(slice);
+assert(vector_from_slice.len() == 3);
+```
+
+### len
+
+Returns the number of elements in the vector.
+
+```rust
+pub fn len(self) -> Field
+```
+
+Example:
+
+```rust
+let empty_vector: Vec<u32> = Vec::new();
+assert(empty_vector.len() == 0);
+```
+
+### get
+
+Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end.
+
+```rust
+pub fn get(self, index: Field) -> T
+```
+
+Example:
+
+```rust
+let vector: Vec<Field> = Vec::from_slice(&[10, 20, 30]);
+assert(vector.get(1) == 20);
+```
+
+### push
+
+Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector.
+
+```rust
+pub fn push(&mut self, elem: T)
+```
+
+Example:
+
+```rust
+let mut vector: Vec<Field> = Vec::new();
+vector.push(10);
+assert(vector.len() == 1);
+```
+
+### pop
+
+Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero.
+ +```rust +pub fn pop(&mut self) -> T +``` + +Example: + +```rust +let mut vector = Vec::from_slice(&[10, 20]); +let popped_elem = vector.pop(); +assert(popped_elem == 20); +assert(vector.len() == 1); +``` + +### insert + +Inserts an element at a specified index, shifting subsequent elements to the right. + +```rust +pub fn insert(&mut self, index: Field, elem: T) +``` + +Example: + +```rust +let mut vector = Vec::from_slice(&[10, 30]); +vector.insert(1, 20); +assert(vector.get(1) == 20); +``` + +### remove + +Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. + +```rust +pub fn remove(&mut self, index: Field) -> T +``` + +Example: + +```rust +let mut vector = Vec::from_slice(&[10, 20, 30]); +let removed_elem = vector.remove(1); +assert(removed_elem == 20); +assert(vector.len() == 2); +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/ec_primitives.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/ec_primitives.md new file mode 100644 index 00000000000..d2b42d67b7c --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/ec_primitives.md @@ -0,0 +1,102 @@ +--- +title: Elliptic Curve Primitives +keywords: [cryptographic primitives, Noir project] +sidebar_position: 4 +--- + +Data structures and methods on them that allow you to carry out computations involving elliptic +curves over the (mathematical) field corresponding to `Field`. For the field currently at our +disposal, applications would involve a curve embedded in BN254, e.g. the +[Baby Jubjub curve](https://eips.ethereum.org/EIPS/eip-2494). + +## Data structures + +### Elliptic curve configurations + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Curve`), i.e. the specific elliptic +curve you want to use, which would be specified using any one of the methods +`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the +defining equation together with a generator point as parameters. You can find more detail in the +comments in +[`noir_stdlib/src/ec.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr), but +the gist of it is that the elliptic curves of interest are usually expressed in one of the standard +forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, +you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly +together with a point at infinity) or `curvegroup` coordinates (some form of projective coordinates +requiring more coordinates but allowing for more efficient implementations of elliptic curve +operations). 
Conversions between all of these forms are provided, and under the hood these
+conversions are done whenever an operation is more efficient in a different representation (or a
+mixed coordinate representation is employed).
+
+### Points
+
+(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Point`), i.e. points lying on the
+elliptic curve. For a curve configuration `c` and a point `p`, it may be checked that `p`
+does indeed lie on `c` by calling `c.contains(p)`.
+
+## Methods
+
+(given a choice of curve representation, e.g. use `std::ec::tecurve::affine::Curve` and use
+`std::ec::tecurve::affine::Point`)
+
+- The **zero element** is given by `Point::zero()`, and we can verify whether a point `p: Point` is
+  zero by calling `p.is_zero()`.
+- **Equality**: Points `p1: Point` and `p2: Point` may be checked for equality by calling
+  `p1.eq(p2)`.
+- **Addition**: For `c: Curve` and points `p1: Point` and `p2: Point` on the curve, adding these two
+  points is accomplished by calling `c.add(p1,p2)`.
+- **Negation**: For a point `p: Point`, `p.negate()` is its negation.
+- **Subtraction**: For `c` and `p1`, `p2` as above, subtracting `p2` from `p1` is accomplished by
+  calling `c.subtract(p1,p2)`.
+- **Scalar multiplication**: For `c` as above, `p: Point` a point on the curve and `n: Field`,
+  scalar multiplication is given by `c.mul(n,p)`. If instead `n: [u1; N]`, i.e. `n` is a bit
+  array, the `bit_mul` method may be used instead: `c.bit_mul(n,p)`
+- **Multi-scalar multiplication**: For `c` as above and arrays `n: [Field; N]` and `p: [Point; N]`,
+  multi-scalar multiplication is given by `c.msm(n,p)`.
+- **Coordinate representation conversions**: The `into_group` method converts a point or curve
+  configuration in the affine representation to one in the CurveGroup representation, and
+  `into_affine` goes in the other direction.
+- **Curve representation conversions**: `tecurve` and `montcurve` curves and points are equivalent
+  and may be converted between one another by calling `into_montcurve` or `into_tecurve` on their
+  configurations or points. `swcurve` is more general and a curve `c` of one of the other two types
+  may be converted to this representation by calling `c.into_swcurve()`, whereas a point `p` lying
+  on the curve given by `c` may be mapped to its corresponding `swcurve` point by calling
+  `c.map_into_swcurve(p)`.
+- **Map-to-curve methods**: The Elligator 2 method of mapping a field element `n: Field` into a
+  `tecurve` or `montcurve` with configuration `c` may be called as `c.elligator2_map(n)`. For all of
+  the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where
+  `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to
+  satisfy are specified in the comments
+  [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr)).
+
+## Examples
+
+The
+[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr)
+illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple more
+interesting examples in Noir would be:
+
+- Public-key cryptography: Given an elliptic curve and a 'base point' on it, determine the public key
+  from the private key. This is a matter of using scalar multiplication.
In the case of Baby Jubjub,
+  for example, the following code would do:
+
+```rust
+use dep::std::ec::tecurve::affine::{Curve, Point};
+
+fn bjj_pub_key(priv_key: Field) -> Point {
+    let bjj = Curve::new(
+        168700,
+        168696,
+        Point::new(
+            995203441582195749578291179787384436505546430278305826713579947235728471134,
+            5472060717959818805561601436314318772137091100104008585924551046643952123905
+        )
+    );
+
+    let base_pt = Point::new(
+        5299619240641551281634865583518297030282874472190772894086521144482721001553,
+        16950150798460657717958625567821834550301663161624707787222815936182638968203
+    );
+
+    bjj.mul(priv_key, base_pt)
+}
+```
+
+This would come in handy in a Merkle proof.
+
+- EdDSA signature verification: This is a matter of combining these primitives with a suitable hash
+  function. See
+  [feat(stdlib): EdDSA sig verification noir#1136](https://github.com/noir-lang/noir/pull/1136) for
+  the case of Baby Jubjub and the Poseidon hash function.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx
new file mode 100644
index 00000000000..4394b48f907
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx
@@ -0,0 +1,98 @@
+---
+title: ECDSA Signature Verification
+description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves
+keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures]
+sidebar_position: 3
+---
+
+import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx';
+
+Noir supports ECDSA signature verification over the secp256k1 and secp256r1 curves.
+
+## ecdsa_secp256k1::verify_signature
+
+Verifier for ECDSA Secp256k1 signatures.
+See ecdsa_secp256k1::verify_signature_slice for a version that accepts slices directly.
+
+```rust title="ecdsa_secp256k1" showLineNumbers
+pub fn verify_signature<N>(
+    public_key_x: [u8; 32],
+    public_key_y: [u8; 32],
+    signature: [u8; 64],
+    message_hash: [u8; N]
+) -> bool
+```
+> Source code: noir_stdlib/src/ecdsa_secp256k1.nr#L2-L9
+
+
+example:
+
+```rust
+fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) {
+    let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message);
+    assert(valid_signature);
+}
+```
+
+<BlackBoxInfo />
+
+## ecdsa_secp256k1::verify_signature_slice
+
+Verifier for ECDSA Secp256k1 signatures where the message is a slice.
+
+```rust title="ecdsa_secp256k1_slice" showLineNumbers
+pub fn verify_signature_slice(
+    public_key_x: [u8; 32],
+    public_key_y: [u8; 32],
+    signature: [u8; 64],
+    message_hash: [u8]
+) -> bool
+```
+> Source code: noir_stdlib/src/ecdsa_secp256k1.nr#L13-L20
+
+
+<BlackBoxInfo />
+
+## ecdsa_secp256r1::verify_signature
+
+Verifier for ECDSA Secp256r1 signatures.
+See ecdsa_secp256r1::verify_signature_slice for a version that accepts slices directly.
+
+```rust title="ecdsa_secp256r1" showLineNumbers
+pub fn verify_signature<N>(
+    public_key_x: [u8; 32],
+    public_key_y: [u8; 32],
+    signature: [u8; 64],
+    message_hash: [u8; N]
+) -> bool
+```
+> Source code: noir_stdlib/src/ecdsa_secp256r1.nr#L2-L9
+
+
+example:
+
+```rust
+fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) {
+    let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message);
+    assert(valid_signature);
+}
+```
+
+<BlackBoxInfo />
+
+## ecdsa_secp256r1::verify_signature_slice
+
+Verifier for ECDSA Secp256r1 signatures where the message is a slice.
+
+```rust title="ecdsa_secp256r1_slice" showLineNumbers
+pub fn verify_signature_slice(
+    public_key_x: [u8; 32],
+    public_key_y: [u8; 32],
+    signature: [u8; 64],
+    message_hash: [u8]
+) -> bool
+```
+> Source code: noir_stdlib/src/ecdsa_secp256r1.nr#L13-L20
+
+
+<BlackBoxInfo />
+
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/eddsa.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/eddsa.mdx
new file mode 100644
index 00000000000..c2c0624dfad
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/eddsa.mdx
@@ -0,0 +1,37 @@
+---
+title: EdDSA Verification
+description: Learn about the cryptographic primitives regarding EdDSA
+keywords: [cryptographic primitives, Noir project, eddsa, signatures]
+sidebar_position: 5
+---
+
+import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx';
+
+## eddsa::eddsa_poseidon_verify
+
+Verifier for EdDSA signatures.
+
+```rust
+fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool
+```
+
+It is also possible to specify the hash algorithm used for the signature by using the `eddsa_verify_with_hasher` function with a parameter implementing the Hasher trait. For instance, if you want to use Poseidon2 instead, you can do the following:
+
+```rust
+use dep::std::hash::poseidon2::Poseidon2Hasher;
+
+let mut hasher = Poseidon2Hasher::default();
+eddsa_verify_with_hasher(pub_key_a.x, pub_key_a.y, s_a, r8_a.x, r8_a.y, msg, &mut hasher);
+```
+
+<BlackBoxInfo />
+
+## eddsa::eddsa_to_pub
+
+Private to public key conversion.
+
+Returns `(pub_key_x, pub_key_y)`
+
+```rust
+fn eddsa_to_pub(secret : Field) -> (Field, Field)
+```
+
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/hashes.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/hashes.mdx
new file mode 100644
index 00000000000..87e113b96cd
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/hashes.mdx
@@ -0,0 +1,250 @@
+---
+title: Hash methods
+description:
+  Learn about the cryptographic primitives ready to use for any Noir project, including sha256,
+  blake2s, pedersen, mimc_bn254 and mimc
+keywords:
+  [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash]
+sidebar_position: 0
+---
+
+import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx';
+
+## sha256
+
+Given an array of bytes, returns the resulting sha256 hash.
+ +```rust title="sha256" showLineNumbers +pub fn sha256(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L9-L11 + + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::sha256(x); +} +``` + + + +## blake2s + +Given an array of bytes, returns an array with the Blake2 hash + +```rust title="blake2s" showLineNumbers +pub fn blake2s(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L15-L17 + + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake2s(x); +} +``` + + + +## blake3 + +Given an array of bytes, returns an array with the Blake3 hash + +```rust title="blake3" showLineNumbers +pub fn blake3(input: [u8; N]) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L21-L23 + + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake3(x); +} +``` + + + +## pedersen_hash + +Given an array of Fields, returns the Pedersen hash. + +```rust title="pedersen_hash" showLineNumbers +pub fn pedersen_hash(input: [Field; N]) -> Field +``` +> Source code: noir_stdlib/src/hash.nr#L45-L47 + + +example: + +```rust title="pedersen-hash" showLineNumbers +use dep::std; + +fn main(x: Field, y: Field, expected_hash: Field) { + let hash = std::hash::pedersen_hash([x, y]); + assert_eq(hash, expected_hash); +} +``` +> Source code: test_programs/execution_success/pedersen_hash/src/main.nr#L1-L8 + + + + +## pedersen_commitment + +Given an array of Fields, returns the Pedersen commitment. + +```rust title="pedersen_commitment" showLineNumbers +struct PedersenPoint { + x : Field, + y : Field, +} + +pub fn pedersen_commitment(input: [Field; N]) -> PedersenPoint { +``` +> Source code: noir_stdlib/src/hash.nr#L26-L33 + + +example: + +```rust title="pedersen-commitment" showLineNumbers +use dep::std; + +fn main(x: Field, y: Field, expected_commitment: std::hash::PedersenPoint) { + let commitment = std::hash::pedersen_commitment([x, y]); + assert_eq(commitment.x, expected_commitment.x); + assert_eq(commitment.y, expected_commitment.y); +} +``` +> Source code: test_programs/execution_success/pedersen_commitment/src/main.nr#L1-L9 + + + + +## keccak256 + +Given an array of bytes (`u8`), returns the resulting keccak hash as an array of +32 bytes (`[u8; 32]`). Specify a message_size to hash only the first +`message_size` bytes of the input. + +```rust title="keccak256" showLineNumbers +pub fn keccak256(input: [u8; N], message_size: u32) -> [u8; 32] +``` +> Source code: noir_stdlib/src/hash.nr#L67-L69 + + +example: + +```rust title="keccak256" showLineNumbers +use dep::std; + +fn main(x: Field, result: [u8; 32]) { + // We use the `as` keyword here to denote the fact that we want to take just the first byte from the x Field + // The padding is taken care of by the program + let digest = std::hash::keccak256([x as u8], 1); + assert(digest == result); + + //#1399: variable message size + let message_size = 4; + let hash_a = std::hash::keccak256([1, 2, 3, 4], message_size); + let hash_b = std::hash::keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size); + + assert(hash_a == hash_b); + + let message_size_big = 8; + let hash_c = std::hash::keccak256([1, 2, 3, 4, 0, 0, 0, 0], message_size_big); + + assert(hash_a != hash_c); +} +``` +> Source code: test_programs/execution_success/keccak256/src/main.nr#L1-L22 + + + + +## poseidon + +Given an array of Fields, returns a new Field with the Poseidon Hash. 
Note that you need to specify
+how many inputs your Poseidon function takes.
+
+```rust
+// example for hash_1; hash_2 accepts an array of length 2, etc.
+fn hash_1(input: [Field; 1]) -> Field
+```
+
+example:
+
+```rust title="poseidon" showLineNumbers
+use dep::std::hash::poseidon;
+use dep::std::hash::poseidon2;
+
+fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field, x3: [Field; 4], y3: Field) {
+    let hash1 = poseidon::bn254::hash_2(x1);
+    assert(hash1 == y1);
+
+    let hash2 = poseidon::bn254::hash_4(x2);
+    assert(hash2 == y2);
+
+    let hash3 = poseidon2::Poseidon2::hash(x3, x3.len());
+    assert(hash3 == y3);
+}
+```
+> Source code: test_programs/execution_success/poseidon_bn254_hash/src/main.nr#L1-L15
+
+
+## poseidon 2
+
+Given an array of Fields, returns a new Field with the Poseidon2 hash. Unlike the Poseidon
+function, there is only one hash, and you can specify a `message_size` to hash only the first
+`message_size` elements of the input:
+
+```rust
+// example for hashing the first three elements of the input
+Poseidon2::hash(input, 3);
+```
+
+The above example for Poseidon also includes Poseidon2.
+
+## mimc_bn254 and mimc
+
+`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by
+providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if
+you're willing to input your own constants:
+
+```rust
+fn mimc<N>(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field
+```
+
+otherwise, use the `mimc_bn254` method:
+
+```rust
+fn mimc_bn254<N>(array: [Field; N]) -> Field
+```
+
+example:
+
+```rust
+fn main() {
+    let x = [163, 117, 178, 149]; // some random bytes
+    let hash = std::hash::mimc::mimc_bn254(x);
+}
+```
+
+## hash_to_field
+
+```rust
+fn hash_to_field(_input : [Field]) -> Field {}
+```
+
+Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return
+a value which can be represented as a `Field`.
+
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/index.md
new file mode 100644
index 00000000000..650f30165d5
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/index.md
@@ -0,0 +1,14 @@
+---
+title: Cryptographic Primitives
+description:
+  Learn about the cryptographic primitives ready to use for any Noir project
+keywords:
+  [
+    cryptographic primitives,
+    Noir project,
+  ]
+---
+
+The Noir team is progressively adding new cryptographic primitives to the standard library. Reach out for news, or if you would be interested in adding more of these primitives to Noir.
+
+Some methods are available thanks to the Aztec backend and are not implemented in Noir itself. When using other backends, these methods may or may not be supplied.
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/scalar.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/scalar.mdx
new file mode 100644
index 00000000000..df411ca5443
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/scalar.mdx
@@ -0,0 +1,33 @@
+---
+title: Scalar multiplication
+description: See how you can perform scalar multiplications over a fixed base in Noir
+keywords: [cryptographic primitives, Noir project, scalar multiplication]
+sidebar_position: 1
+---
+
+import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx';
+
+## scalar_mul::fixed_base_embedded_curve
+
+Performs scalar multiplication over the embedded curve whose coordinates are defined by the
+configured Noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin.
+
+```rust title="fixed_base_embedded_curve" showLineNumbers
+pub fn fixed_base_embedded_curve(
+    low: Field,
+    high: Field
+) -> [Field; 2]
+```
+> Source code: noir_stdlib/src/scalar_mul.nr#L27-L32
+
+
+example:
+
+```rust
+fn main(x : Field) {
+    // the second argument is the high 128 bits of the scalar, zero here
+    let scal = std::scalar_mul::fixed_base_embedded_curve(x, 0);
+    println(scal);
+}
+```
+
+
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/schnorr.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/schnorr.mdx
new file mode 100644
index 00000000000..b59e69c8f07
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/cryptographic_primitives/schnorr.mdx
@@ -0,0 +1,64 @@
+---
+title: Schnorr Signatures
+description: Learn how you can verify Schnorr signatures using Noir
+keywords: [cryptographic primitives, Noir project, schnorr, signatures]
+sidebar_position: 2
+---
+
+import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx';
+
+## schnorr::verify_signature
+
+Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin).
+See schnorr::verify_signature_slice for a version that works directly on slices.
+
+```rust title="schnorr_verify" showLineNumbers
+pub fn verify_signature(
+    public_key_x: Field,
+    public_key_y: Field,
+    signature: [u8; 64],
+    message: [u8; N]
+) -> bool
+```
+> Source code: noir_stdlib/src/schnorr.nr#L2-L9
+
+
+where `signature` can be generated like so using the npm package
+[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg)
+
+```js
+const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm');
+const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr');
+
+...
+
+const barretenberg = await BarretenbergWasm.new();
+const schnorr = new Schnorr(barretenberg);
+const pubKey = schnorr.computePublicKey(privateKey);
+const message = ...
+const signature = Array.from(
+  schnorr.constructSignature(message, privateKey).toBuffer()
+);
+
+...
+```
+
+
+
+## schnorr::verify_signature_slice
+
+Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin)
+where the message is a slice.
+
+```rust title="schnorr_verify_slice" showLineNumbers
+pub fn verify_signature_slice(
+    public_key_x: Field,
+    public_key_y: Field,
+    signature: [u8; 64],
+    message: [u8]
+) -> bool
+```
+> Source code: noir_stdlib/src/schnorr.nr#L13-L20
+
+
+
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/logging.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/logging.md
new file mode 100644
index 00000000000..db75ef9f86f
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/logging.md
@@ -0,0 +1,78 @@
+---
+title: Logging
+description:
+  Learn how to use the println statement for debugging in Noir with this tutorial. Understand the
+  basics of logging in Noir and how to implement it in your code.
+keywords:
+  [
+    noir logging,
+    println statement,
+    print statement,
+    debugging in noir,
+    noir std library,
+    logging tutorial,
+    basic logging in noir,
+    noir logging implementation,
+    noir debugging techniques,
+    rust,
+  ]
+---
+
+The standard library provides two familiar statements you can use: `println` and `print`. Despite being a limited implementation of Rust's `println!` and `print!` macros, these constructs can be useful for debugging.
+
+You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests).
+
+It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is because every input in a test is a constant rather than a witness, so failing constraints are reported as errors during compilation, while printing only happens during execution (which comes after compilation). As a result, neither `println` nor `print` can produce output for failed constraints caught at compile time.
+
+Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note, however, that slices are currently unsupported. For example:
+
+```rust
+struct Person {
+    age: Field,
+    height: Field,
+}
+
+fn main(age: Field, height: Field) {
+    let person = Person {
+        age: age,
+        height: height,
+    };
+    println(person);
+    println(age + height);
+    println("Hello world!");
+}
+```
+
+You can print different types in the same statement (including strings) with a type called `fmtstr`.
 It can be specified in the same way as a normal string, just prepended with an "f" character:
+
+```rust
+    let fmt_str = f"i: {i}, j: {j}";
+    println(fmt_str);
+
+    let s = myStruct { y: x, x: y };
+    println(s);
+
+    println(f"i: {i}, s: {s}");
+
+    println(x);
+    println([x, y]);
+
+    let foo = fooStruct { my_struct: s, foo: 15 };
+    println(f"s: {s}, foo: {foo}");
+
+    println(15);       // prints 0x0f, implicit Field
+    println(-1 as u8); // prints 255
+    println(-1 as i8); // prints -1
+```
+
+Examples shown above are interchangeable between the two `print` statements:
+
+```rust
+let person = Person { age : age, height : height };
+
+println(person);
+print(person);
+
+println("Hello world!"); // Prints with a newline at the end of the input
+print("Hello world!");   // Prints the input and keeps cursor on the same line
+```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/merkle_trees.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/merkle_trees.md
new file mode 100644
index 00000000000..6a9ebf72ada
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/merkle_trees.md
@@ -0,0 +1,58 @@
+---
+title: Merkle Trees
+description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples.
+keywords:
+  [
+    Merkle trees in Noir,
+    Noir programming language,
+    check membership,
+    computing root from leaf,
+    Noir Merkle tree implementation,
+    Merkle tree tutorial,
+    Merkle tree code examples,
+    Noir libraries,
+    pedersen hash,
+  ]
+---
+
+## compute_merkle_root
+
+Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](./cryptographic_primitives/hashes.mdx#pedersen_hash).
+
+```rust
+fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field
+```
+
+example:
+
+```rust
+/**
+    // these values are for this example only
+    index = "0"
+    priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365"
+    secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f"
+    note_hash_path = [
+    "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc",
+    "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a",
+    "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40"
+    ]
+ */
+fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) {
+
+    let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0);
+    let pubkey_x = pubkey[0];
+    let pubkey_y = pubkey[1];
+    let note_commitment = std::hash::pedersen_commitment([pubkey_x, pubkey_y, secret]);
+
+    let root = std::merkle::compute_merkle_root(note_commitment.x, index, note_hash_path.as_slice());
+    println(root);
+}
+```
+
+To check merkle tree membership (a sketch of these steps follows at the end of this page):
+
+1. Include a merkle root as a program input.
+2. Compute the merkle root of a given leaf, index and hash path.
+3. Assert the merkle roots are equal.
+
+For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree).
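+
+As a minimal sketch of the three membership-check steps above (parameter names are illustrative, and the tree depth is fixed at 2 for brevity):
+
+```rust
+fn main(leaf: Field, index: Field, hash_path: [Field; 2], root: pub Field) {
+    // 1. the expected merkle root is a (public) program input
+    // 2. recompute the root from the leaf, its index and its hash path
+    let computed_root = std::merkle::compute_merkle_root(leaf, index, hash_path.as_slice());
+    // 3. assert that the recomputed root matches the expected one
+    assert(computed_root == root);
+}
+```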
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/options.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/options.md
new file mode 100644
index 00000000000..a1bd4e1de5f
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/options.md
@@ -0,0 +1,101 @@
+---
+title: Option Type
+---
+
+The `Option<T>` type is a way to express that a value might be present (`Some(T)`) or absent (`None`). It's a safer way to handle potential absence of values, compared to using nulls in many other languages.
+
+```rust
+struct Option<T> {
+    None,
+    Some(T),
+}
+```
+
+The `Option` type, already imported into your Noir program, can be used directly:
+
+```rust
+fn main() {
+    let none = Option::none();
+    let some = Option::some(3);
+}
+```
+
+See [this test](https://github.com/noir-lang/noir/blob/5cbfb9c4a06c8865c98ff2b594464b037d821a5c/crates/nargo_cli/tests/test_data/option/src/main.nr) for a more comprehensive set of examples of each of the methods described below.
+
+## Methods
+
+### none
+
+Constructs a none value.
+
+### some
+
+Constructs a some wrapper around a given value.
+
+### is_none
+
+Returns true if the Option is None.
+
+### is_some
+
+Returns true if the Option is Some.
+
+### unwrap
+
+Asserts `self.is_some()` and returns the wrapped value.
+
+### unwrap_unchecked
+
+Returns the inner value without asserting `self.is_some()`. This method can be useful within an if condition when we already know that `option.is_some()`. If the option is None, there is no guarantee what value will be returned, only that it will be of type T for an `Option<T>`.
+
+### unwrap_or
+
+Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value.
+
+### unwrap_or_else
+
+Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return a default value.
+
+### expect
+
+Asserts `self.is_some()` with a provided custom message and returns the contained `Some` value. The custom message is expected to be a format string.
+
+### map
+
+If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`.
+
+### map_or
+
+If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value.
+
+### map_or_else
+
+If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`.
+
+### and
+
+Returns None if self is None. Otherwise, this returns `other`.
+
+### and_then
+
+If self is None, this returns None. Otherwise, this calls the given function with the Some value contained within self, and returns the result of that call. In some languages this function is called `flat_map` or `bind`.
+
+### or
+
+If self is Some, return self. Otherwise, return `other`.
+
+### or_else
+
+If self is Some, return self. Otherwise, return `default()`.
+
+### xor
+
+If only one of the two Options is Some, return that option. Otherwise, if both options are Some or both are None, None is returned.
+
+### filter
+
+Returns `Some(x)` if self is `Some(x)` and `predicate(x)` is true. Otherwise, this returns `None`.
+
+### flatten
+
+Flattens an `Option<Option<T>>` into an `Option<T>`. This returns `None` if the outer Option is None. Otherwise, this returns the inner Option.
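+
+A short sketch tying a few of these methods together (the values here are illustrative):
+
+```rust
+fn main() {
+    let some = Option::some(3);
+    let none: Option<Field> = Option::none();
+
+    // map transforms the wrapped value; unwrap_or supplies a fallback
+    assert(some.map(|x| x * 2).unwrap() == 6);
+    assert(none.unwrap_or(5) == 5);
+
+    // xor yields the Some side when exactly one side is Some
+    assert(some.xor(none).unwrap() == 3);
+}
+```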
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/recursion.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/recursion.md new file mode 100644 index 00000000000..a93894043dc --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/recursion.md @@ -0,0 +1,88 @@ +--- +title: Recursive Proofs +description: Learn about how to write recursive proofs in Noir. +keywords: [recursion, recursive proofs, verification_key, verify_proof] +--- + +Noir supports recursively verifying proofs, meaning you verify the proof of a Noir program in another Noir program. This enables creating proofs of arbitrary size by doing step-wise verification of smaller components of a large proof. + +Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) + +## The `#[recursive]` Attribute + +In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. + +### Example usage with `#[recursive]` + +```rust +#[recursive] +fn main(x: Field, y: pub Field) { + assert(x == y, "x and y are not equal"); +} + +// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit +// are intended for recursive verification. +``` + +By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. recursive-friendly proof artifact generation) without additional flags or configurations. + +## Verifying Recursive Proofs + +```rust +#[foreign(recursive_aggregation)] +pub fn verify_proof(verification_key: [Field], proof: [Field], public_inputs: [Field], key_hash: Field) {} +``` + +:::info + +This is a black box function. Read [this section](./black_box_fns) to learn more about black box functions in Noir. + +::: + +## Example usage + +```rust +use dep::std; + +fn main( + verification_key : [Field; 114], + proof : [Field; 93], + public_inputs : [Field; 1], + key_hash : Field, + proof_b : [Field; 93], +) { + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash + ); + + std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash + ); +} +``` + +You can see a full example of recursive proofs in [this example recursion demo repo](https://github.com/noir-lang/noir-examples/tree/master/recursion). + +## Parameters + +### `verification_key` + +The verification key for the zk program that is being verified. + +### `proof` + +The proof for the zk program that is being verified. + +### `public_inputs` + +These represent the public inputs of the proof we are verifying. + +### `key_hash` + +A key hash is used to check the validity of the verification key. The circuit implementing this opcode can use this hash to ensure that the key provided to the circuit matches the key produced by the circuit creator. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/traits.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/traits.md
new file mode 100644
index 00000000000..94337e77a3e
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/traits.md
@@ -0,0 +1,410 @@
+---
+title: Traits
+description: Noir's stdlib provides a few commonly used traits.
+keywords: [traits, trait, interface, protocol, default, add, eq]
+---
+
+## `std::default`
+
+### `std::default::Default`
+
+```rust title="default-trait" showLineNumbers
+trait Default {
+    fn default() -> Self;
+}
+```
+> Source code: noir_stdlib/src/default.nr#L1-L5
+
+
+Constructs a default value of a type.
+
+Implementations:
+```rust
+impl Default for Field { .. }
+
+impl Default for i8 { .. }
+impl Default for i16 { .. }
+impl Default for i32 { .. }
+impl Default for i64 { .. }
+
+impl Default for u8 { .. }
+impl Default for u16 { .. }
+impl Default for u32 { .. }
+impl Default for u64 { .. }
+
+impl Default for () { .. }
+impl Default for bool { .. }
+
+impl<T, N> Default for [T; N]
+    where T: Default { .. }
+
+impl<T> Default for [T] { .. }
+
+impl<A, B> Default for (A, B)
+    where A: Default, B: Default { .. }
+
+impl<A, B, C> Default for (A, B, C)
+    where A: Default, B: Default, C: Default { .. }
+
+impl<A, B, C, D> Default for (A, B, C, D)
+    where A: Default, B: Default, C: Default, D: Default { .. }
+
+impl<A, B, C, D, E> Default for (A, B, C, D, E)
+    where A: Default, B: Default, C: Default, D: Default, E: Default { .. }
+```
+
+For primitive integer types, the return value of `default` is `0`. Container
+types such as arrays are filled with default values of their element type,
+except slices, whose length is unknown and thus defaults to zero.
+
+
+## `std::convert`
+
+### `std::convert::From`
+
+```rust title="from-trait" showLineNumbers
+trait From<T> {
+    fn from(input: T) -> Self;
+}
+```
+> Source code: noir_stdlib/src/convert.nr#L1-L5
+
+
+The `From` trait defines how to convert from a given type `T` to the type on which the trait is implemented.
+
+The Noir standard library provides a number of implementations of `From` between primitive types.
+
+```rust title="from-impls" showLineNumbers
+// Unsigned integers
+
+impl From<u8> for u32 { fn from(value: u8) -> u32 { value as u32 } }
+
+impl From<u8> for u64 { fn from(value: u8) -> u64 { value as u64 } }
+impl From<u32> for u64 { fn from(value: u32) -> u64 { value as u64 } }
+
+impl From<u8> for Field { fn from(value: u8) -> Field { value as Field } }
+impl From<u32> for Field { fn from(value: u32) -> Field { value as Field } }
+impl From<u64> for Field { fn from(value: u64) -> Field { value as Field } }
+
+// Signed integers
+
+impl From<i8> for i32 { fn from(value: i8) -> i32 { value as i32 } }
+
+impl From<i8> for i64 { fn from(value: i8) -> i64 { value as i64 } }
+impl From<i32> for i64 { fn from(value: i32) -> i64 { value as i64 } }
+
+// Booleans
+impl From<bool> for u8 { fn from(value: bool) -> u8 { value as u8 } }
+impl From<bool> for u32 { fn from(value: bool) -> u32 { value as u32 } }
+impl From<bool> for u64 { fn from(value: bool) -> u64 { value as u64 } }
+impl From<bool> for i8 { fn from(value: bool) -> i8 { value as i8 } }
+impl From<bool> for i32 { fn from(value: bool) -> i32 { value as i32 } }
+impl From<bool> for i64 { fn from(value: bool) -> i64 { value as i64 } }
+impl From<bool> for Field { fn from(value: bool) -> Field { value as Field } }
+```
+> Source code: noir_stdlib/src/convert.nr#L25-L52
+
+
+#### When to implement `From`
+
+As a general rule of thumb, `From` may be implemented in the [situations where it would be suitable in Rust](https://doc.rust-lang.org/std/convert/trait.From.html#when-to-implement-from):
+
+- The conversion is *infallible*: Noir does not provide an equivalent to Rust's `TryFrom`; if the conversion can fail, provide a named method instead.
+- The conversion is *lossless*: semantically, it should not lose or discard information. For example, `u32: From<u16>` can losslessly convert any `u16` into a valid `u32` such that the original `u16` can be recovered. On the other hand, `u16: From<u32>` should not be implemented as `2**16` is a `u32` which cannot be losslessly converted into a `u16`.
+- The conversion is *value-preserving*: the conceptual kind and meaning of the resulting value is the same, even though the Noir type and technical representation might be different. While it's possible to infallibly and losslessly convert a `u8` into a `str<2>` hex representation, `4u8` and `"04"` are too different for `str<2>: From<u8>` to be implemented.
+- The conversion is *obvious*: it's the only reasonable conversion between the two types. If there's ambiguity on how to convert between them such that the same input could potentially map to two different values then a named method should be used. For instance rather than implementing `U128: From<[u8; 16]>`, the methods `U128::from_le_bytes` and `U128::from_be_bytes` are used as otherwise the endianness of the array would be ambiguous, resulting in two potential values of `U128` from the same byte array.
+
+One additional recommendation specific to Noir is:
+- The conversion is *efficient*: it's relatively cheap to convert between the two types. Due to being a ZK DSL, it's more important to avoid unnecessary computation compared to Rust. If the implementation of `From` would encourage users to perform unnecessary conversion, resulting in additional proving time, then it may be preferable to expose functionality such that this conversion may be avoided.
+
+### `std::convert::Into`
+
+The `Into` trait is defined as the reciprocal of `From`. It should be easy to convince yourself that if we can convert to type `A` from type `B`, then it's possible to convert type `B` into type `A`.
+
+For this reason, implementing `From` on a type will automatically generate a matching `Into` implementation. One should always prefer implementing `From` over `Into` as implementing `Into` will not generate a matching `From` implementation.
+
+```rust title="into-trait" showLineNumbers
+trait Into<T> {
+    fn into(self) -> T;
+}
+
+impl<T, U> Into<T> for U where T: From<U> {
+    fn into(self) -> T {
+        T::from(self)
+    }
+}
+```
+> Source code: noir_stdlib/src/convert.nr#L13-L23
+
+
+`Into` is most useful when passing function arguments where the types don't quite match up with what the function expects. In this case, the compiler has enough type information to perform the necessary conversion by just appending `.into()` onto the arguments in question.
+
+
+## `std::cmp`
+
+### `std::cmp::Eq`
+
+```rust title="eq-trait" showLineNumbers
+trait Eq {
+    fn eq(self, other: Self) -> bool;
+}
+```
+> Source code: noir_stdlib/src/cmp.nr#L1-L5
+
+
+Returns `true` if `self` is equal to `other`. Implementing this trait on a type
+allows the type to be used with `==` and `!=`.
+
+Implementations:
+```rust
+impl Eq for Field { .. }
+
+impl Eq for i8 { .. }
+impl Eq for i16 { .. }
+impl Eq for i32 { .. }
+impl Eq for i64 { .. }
+
+impl Eq for u8 { .. }
+impl Eq for u16 { .. }
+impl Eq for u32 { .. }
+impl Eq for u64 { .. }
+
+impl Eq for () { .. }
+impl Eq for bool { .. }
+
+impl<T, N> Eq for [T; N]
+    where T: Eq { .. }
+
+impl<T> Eq for [T]
+    where T: Eq { .. }
+
+impl<A, B> Eq for (A, B)
+    where A: Eq, B: Eq { .. }
+
+impl<A, B, C> Eq for (A, B, C)
+    where A: Eq, B: Eq, C: Eq { .. }
+
+impl<A, B, C, D> Eq for (A, B, C, D)
+    where A: Eq, B: Eq, C: Eq, D: Eq { .. }
+
+impl<A, B, C, D, E> Eq for (A, B, C, D, E)
+    where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. }
+```
+
+### `std::cmp::Ord`
+
+```rust title="ord-trait" showLineNumbers
+trait Ord {
+    fn cmp(self, other: Self) -> Ordering;
+}
+```
+> Source code: noir_stdlib/src/cmp.nr#L102-L106
+
+
+`a.cmp(b)` compares two values returning `Ordering::less()` if `a < b`,
+`Ordering::equal()` if `a == b`, or `Ordering::greater()` if `a > b`.
+Implementing this trait on a type allows `<`, `<=`, `>`, and `>=` to be
+used on values of the type.
+
+`std::cmp` also provides `max` and `min` functions for any type which implements the `Ord` trait.
+
+Implementations:
+
+```rust
+impl Ord for u8 { .. }
+impl Ord for u16 { .. }
+impl Ord for u32 { .. }
+impl Ord for u64 { .. }
+
+impl Ord for i8 { .. }
+impl Ord for i16 { .. }
+impl Ord for i32 { .. }
+impl Ord for i64 { .. }
+
+impl Ord for () { .. }
+impl Ord for bool { .. }
+
+impl<T, N> Ord for [T; N]
+    where T: Ord { .. }
+
+impl<T> Ord for [T]
+    where T: Ord { .. }
+
+impl<A, B> Ord for (A, B)
+    where A: Ord, B: Ord { .. }
+
+impl<A, B, C> Ord for (A, B, C)
+    where A: Ord, B: Ord, C: Ord { .. }
+
+impl<A, B, C, D> Ord for (A, B, C, D)
+    where A: Ord, B: Ord, C: Ord, D: Ord { .. }
+
+impl<A, B, C, D, E> Ord for (A, B, C, D, E)
+    where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { .. }
+```
+
+## `std::ops`
+
+### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div`
+
+These traits abstract over addition, subtraction, multiplication, and division respectively.
+Implementing these traits for a given type will also allow that type to be used with the corresponding operator
+for that trait (`+` for Add, etc) in addition to the normal method names.
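+
+For example, implementing `Add` for a hypothetical `Wrapper` struct (a sketch, not a stdlib type) makes `+` usable on it:
+
+```rust
+use dep::std::ops::Add;
+
+struct Wrapper {
+    inner: Field,
+}
+
+impl Add for Wrapper {
+    // invoked for `a + b` once the trait is implemented
+    fn add(self, other: Self) -> Self {
+        Wrapper { inner: self.inner + other.inner }
+    }
+}
+
+fn main() {
+    let a = Wrapper { inner: 1 };
+    let b = Wrapper { inner: 2 };
+    assert((a + b).inner == 3);
+}
+```
+
+The trait definitions are as follows: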
+
+```rust title="add-trait" showLineNumbers
+trait Add {
+    fn add(self, other: Self) -> Self;
+}
+```
+> Source code: noir_stdlib/src/ops.nr#L1-L5
+
+```rust title="sub-trait" showLineNumbers
+trait Sub {
+    fn sub(self, other: Self) -> Self;
+}
+```
+> Source code: noir_stdlib/src/ops.nr#L17-L21
+
+```rust title="mul-trait" showLineNumbers
+trait Mul {
+    fn mul(self, other: Self) -> Self;
+}
+```
+> Source code: noir_stdlib/src/ops.nr#L33-L37
+
+```rust title="div-trait" showLineNumbers
+trait Div {
+    fn div(self, other: Self) -> Self;
+}
+```
+> Source code: noir_stdlib/src/ops.nr#L49-L53
+
+
+The implementations block below is given for the `Add` trait, but the same types that implement
+`Add` also implement `Sub`, `Mul`, and `Div`.
+
+Implementations:
+```rust
+impl Add for Field { .. }
+
+impl Add for i8 { .. }
+impl Add for i16 { .. }
+impl Add for i32 { .. }
+impl Add for i64 { .. }
+
+impl Add for u8 { .. }
+impl Add for u16 { .. }
+impl Add for u32 { .. }
+impl Add for u64 { .. }
+```
+
+### `std::ops::Rem`
+
+```rust title="rem-trait" showLineNumbers
+trait Rem {
+    fn rem(self, other: Self) -> Self;
+}
+```
+> Source code: noir_stdlib/src/ops.nr#L65-L69
+
+
+`Rem::rem(a, b)` is the remainder function, returning what is
+left after dividing `a` by `b`. Implementing `Rem` allows the `%` operator
+to be used with the implementation type.
+
+Unlike other numeric traits, `Rem` is not implemented for `Field`.
+
+Implementations:
+```rust
+impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } }
+impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } }
+impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } }
+impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } }
+
+impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } }
+impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } }
+impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } }
+impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } }
+```
+
+### `std::ops::{ BitOr, BitAnd, BitXor }`
+
+```rust title="bitor-trait" showLineNumbers
+trait BitOr {
+    fn bitor(self, other: Self) -> Self;
+}
+```
+> Source code: noir_stdlib/src/ops.nr#L79-L83
+
+```rust title="bitand-trait" showLineNumbers
+trait BitAnd {
+    fn bitand(self, other: Self) -> Self;
+}
+```
+> Source code: noir_stdlib/src/ops.nr#L95-L99
+
+```rust title="bitxor-trait" showLineNumbers
+trait BitXor {
+    fn bitxor(self, other: Self) -> Self;
+}
+```
+> Source code: noir_stdlib/src/ops.nr#L111-L115
+
+
+Traits for the bitwise operations `|`, `&`, and `^`.
+
+Implementing `BitOr`, `BitAnd` or `BitXor` for a type allows the `|`, `&`, or `^` operator respectively
+to be used with the type.
+
+The implementations block below is given for the `BitOr` trait, but the same types that implement
+`BitOr` also implement `BitAnd` and `BitXor`.
+
+Implementations:
+```rust
+impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } }
+
+impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } }
+impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } }
+impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } }
+impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } }
+
+impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } }
+impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } }
+impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } }
+impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } }
+```
+
+### `std::ops::{ Shl, Shr }`
+
+```rust title="shl-trait" showLineNumbers
+trait Shl {
+    fn shl(self, other: Self) -> Self;
+}
+```
+> Source code: noir_stdlib/src/ops.nr#L127-L131
+
+```rust title="shr-trait" showLineNumbers
+trait Shr {
+    fn shr(self, other: Self) -> Self;
+}
+```
+> Source code: noir_stdlib/src/ops.nr#L142-L146
+
+
+Traits for left and right bit shifts.
+
+Implementing `Shl` for a type allows the left shift operator (`<<`) to be used with the implementation type.
+Similarly, implementing `Shr` allows the right shift operator (`>>`) to be used with the type.
+
+Note that bit shifting is not currently implemented for signed types.
+
+The implementations block below is given for the `Shl` trait, but the same types that implement
+`Shl` also implement `Shr`.
+
+Implementations:
+```rust
+impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } }
+impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } }
+impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } }
+impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } }
+```
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/zeroed.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/zeroed.md
new file mode 100644
index 00000000000..f450fecdd36
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/noir/standard_library/zeroed.md
@@ -0,0 +1,26 @@
+---
+title: Zeroed Function
+description:
+  The zeroed function returns a zeroed value of any type.
+keywords:
+  [
+    zeroed
+  ]
+---
+
+Implements `fn zeroed<T>() -> T` to return a zeroed value of any type. This function is generally unsafe to use, as the zeroed bit pattern is not guaranteed to be valid for all types. It can, however, be useful in cases where the value is guaranteed not to be used, such as in a BoundedVec library implementing a vector that can grow up to a certain length, backed by an array (see the sketch at the end of this page). The array can be initialized with zeroed values, which are guaranteed to be inaccessible until the vector is pushed to. Similarly, enumerations in Noir can be implemented using this method by providing zeroed values for the unused variants.
+
+You can access the function at `std::unsafe::zeroed`.
+
+This function currently supports the following types:
+
+- Field
+- Bool
+- Uint
+- Array
+- Slice
+- String
+- Tuple
+- Function
+
+Using it on other types could result in unexpected behavior.
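+
+As a small sketch of the BoundedVec-style pattern described above (the `Stack` type and its capacity of 4 are illustrative, not part of the standard library):
+
+```rust
+struct Stack {
+    storage: [Field; 4],
+    len: u64,
+}
+
+impl Stack {
+    fn new() -> Self {
+        // zeroed slots are placeholders; they are never read before being pushed to
+        Stack { storage: [std::unsafe::zeroed(); 4], len: 0 }
+    }
+
+    fn push(&mut self, value: Field) {
+        self.storage[self.len] = value;
+        self.len += 1;
+    }
+}
+```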
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/.nojekyll b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/.nojekyll
new file mode 100644
index 00000000000..e2ac6616add
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/.nojekyll
@@ -0,0 +1 @@
+TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.
\ No newline at end of file
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md
new file mode 100644
index 00000000000..d7249d24330
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md
@@ -0,0 +1,160 @@
+# BarretenbergBackend
+
+## Extends
+
+- `BarretenbergVerifierBackend`
+
+## Implements
+
+- [`Backend`](../index.md#backend)
+
+## Constructors
+
+### new BarretenbergBackend(acirCircuit, options)
+
+```ts
+new BarretenbergBackend(acirCircuit, options): BarretenbergBackend
+```
+
+#### Parameters
+
+| Parameter | Type |
+| :------ | :------ |
+| `acirCircuit` | `CompiledCircuit` |
+| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) |
+
+#### Returns
+
+[`BarretenbergBackend`](BarretenbergBackend.md)
+
+#### Inherited from
+
+BarretenbergVerifierBackend.constructor
+
+## Properties
+
+| Property | Type | Description | Inheritance |
+| :------ | :------ | :------ | :------ |
+| `acirComposer` | `any` | - | BarretenbergVerifierBackend.acirComposer |
+| `acirUncompressedBytecode` | `Uint8Array` | - | BarretenbergVerifierBackend.acirUncompressedBytecode |
+| `api` | `Barretenberg` | - | BarretenbergVerifierBackend.api |
+| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | - | BarretenbergVerifierBackend.options |
+
+## Methods
+
+### destroy()
+
+```ts
+destroy(): Promise<void>
+```
+
+#### Returns
+
+`Promise`\<`void`\>
+
+#### Inherited from
+
+BarretenbergVerifierBackend.destroy
+
+***
+
+### generateProof()
+
+```ts
+generateProof(compressedWitness): Promise<ProofData>
+```
+
+#### Parameters
+
+| Parameter | Type |
+| :------ | :------ |
+| `compressedWitness` | `Uint8Array` |
+
+#### Returns
+
+`Promise`\<`ProofData`\>
+
+#### Description
+
+Generates a proof
+
+***
+
+### generateRecursiveProofArtifacts()
+
+```ts
+generateRecursiveProofArtifacts(proofData, numOfPublicInputs): Promise<object>
+```
+
+Generates artifacts that will be passed to a circuit that will verify this proof.
+
+Instead of passing the proof and verification key as a byte array, we pass them
+as fields which makes it cheaper to verify in a circuit.
+
+The proof that is passed here will have been created using a circuit
+that has the #[recursive] attribute on its `main` method.
+
+The number of public inputs denotes how many public inputs are in the inner proof.
+
+#### Parameters
+
+| Parameter | Type | Default value |
+| :------ | :------ | :------ |
+| `proofData` | `ProofData` | `undefined` |
+| `numOfPublicInputs` | `number` | `0` |
+
+#### Returns
+
+`Promise`\<`object`\>
+
+#### Example
+
+```typescript
+const artifacts = await backend.generateRecursiveProofArtifacts(proof, numOfPublicInputs);
+```
+
+***
+
+### getVerificationKey()
+
+```ts
+getVerificationKey(): Promise<Uint8Array>
+```
+
+#### Returns
+
+`Promise`\<`Uint8Array`\>
+
+#### Inherited from
+
+BarretenbergVerifierBackend.getVerificationKey
+
+***
+
+### verifyProof()
+
+```ts
+verifyProof(proofData): Promise<boolean>
+```
+
+#### Parameters
+
+| Parameter | Type |
+| :------ | :------ |
+| `proofData` | `ProofData` |
+
+#### Returns
+
+`Promise`\<`boolean`\>
+
+#### Inherited from
+
+BarretenbergVerifierBackend.verifyProof
+
+#### Description
+
+Verifies a proof
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md
new file mode 100644
index 00000000000..500276ea748
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier.md
@@ -0,0 +1,58 @@
+# BarretenbergVerifier
+
+## Constructors
+
+### new BarretenbergVerifier(options)
+
+```ts
+new BarretenbergVerifier(options): BarretenbergVerifier
+```
+
+#### Parameters
+
+| Parameter | Type |
+| :------ | :------ |
+| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) |
+
+#### Returns
+
+[`BarretenbergVerifier`](BarretenbergVerifier.md)
+
+## Methods
+
+### destroy()
+
+```ts
+destroy(): Promise<void>
+```
+
+#### Returns
+
+`Promise`\<`void`\>
+
+***
+
+### verifyProof()
+
+```ts
+verifyProof(proofData, verificationKey): Promise<boolean>
+```
+
+#### Parameters
+
+| Parameter | Type |
+| :------ | :------ |
+| `proofData` | `ProofData` |
+| `verificationKey` | `Uint8Array` |
+
+#### Returns
+
+`Promise`\<`boolean`\>
+
+#### Description
+
+Verifies a proof
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/index.md
new file mode 100644
index 00000000000..64971973196
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/index.md
@@ -0,0 +1,59 @@
+# backend_barretenberg
+
+## Exports
+
+### Classes
+
+| Class | Description |
+| :------ | :------ |
+| [BarretenbergBackend](classes/BarretenbergBackend.md) | - |
+| [BarretenbergVerifier](classes/BarretenbergVerifier.md) | - |
+
+### Type Aliases
+
+| Type alias | Description |
+| :------ | :------ |
+| [BackendOptions](type-aliases/BackendOptions.md) | - |
+
+## References
+
+### CompiledCircuit
+
+Renames and re-exports [Backend](index.md#backend)
+
+***
+
+### ProofData
+
+Renames and re-exports [Backend](index.md#backend)
+
+## Variables
+
+### Backend
+
+```ts
+Backend: any;
+```
+
+## Functions
+
+### publicInputsToWitnessMap()
+
+```ts
+publicInputsToWitnessMap(publicInputs, abi): Backend
+```
+
+#### Parameters
+
+| Parameter | Type |
+| 
:------ | :------ |
+| `publicInputs` | `string`[] |
+| `abi` | `Abi` |
+
+#### Returns
+
+[`Backend`](index.md#backend)
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md
new file mode 100644
index 00000000000..b49a479f4f4
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md
@@ -0,0 +1,21 @@
+# BackendOptions
+
+```ts
+type BackendOptions: object;
+```
+
+## Description
+
+An options object, currently only used to specify the number of threads to use.
+
+## Type declaration
+
+| Member | Type | Description |
+| :------ | :------ | :------ |
+| `memory` | `object` | - |
+| `memory.maximum` | `number` | - |
+| `threads` | `number` | **Description**<br /><br />Number of threads |
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs
new file mode 100644
index 00000000000..d7d5128f9e3
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs
@@ -0,0 +1,4 @@
+// @ts-check
+/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */
+const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend","label":"BarretenbergBackend"},{"type":"doc","id":"reference/NoirJS/backend_barretenberg/classes/BarretenbergVerifier","label":"BarretenbergVerifier"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions","label":"BackendOptions"}]}]};
+module.exports = typedocSidebar.items;
\ No newline at end of file
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/.nojekyll b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/.nojekyll
new file mode 100644
index 00000000000..e2ac6616add
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/.nojekyll
@@ -0,0 +1 @@
+TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false.
\ No newline at end of file
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/classes/Noir.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/classes/Noir.md
new file mode 100644
index 00000000000..45dd62ee57e
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/classes/Noir.md
@@ -0,0 +1,132 @@
+# Noir
+
+## Constructors
+
+### new Noir(circuit, backend)
+
+```ts
+new Noir(circuit, backend?): Noir
+```
+
+#### Parameters
+
+| Parameter | Type |
+| :------ | :------ |
+| `circuit` | `CompiledCircuit` |
+| `backend`? | `any` |
+
+#### Returns
+
+[`Noir`](Noir.md)
+
+## Methods
+
+### destroy()
+
+```ts
+destroy(): Promise<void>
+```
+
+#### Returns
+
+`Promise`\<`void`\>
+
+#### Description
+
+Destroys the underlying backend instance.
+
+#### Example
+
+```typescript
+await noir.destroy();
+```
+
+***
+
+### execute()
+
+```ts
+execute(inputs, foreignCallHandler?): Promise<object>
+```
+
+#### Parameters
+
+| Parameter | Type |
+| :------ | :------ |
+| `inputs` | `InputMap` |
+| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) |
+
+#### Returns
+
+`Promise`\<`object`\>
+
+#### Description
+
+Executes a circuit to get its witness and return value.
+
+#### Example
+
+```typescript
+async execute(inputs)
+```
+
+***
+
+### generateProof()
+
+```ts
+generateProof(inputs, foreignCallHandler?): Promise<ProofData>
+```
+
+#### Parameters
+
+| Parameter | Type |
+| :------ | :------ |
+| `inputs` | `InputMap` |
+| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) |
+
+#### Returns
+
+`Promise`\<`ProofData`\>
+
+#### Description
+
+Generates a witness and a proof given an object as input.
+
+#### Example
+
+```typescript
+async generateProof(input)
+```
+
+***
+
+### verifyProof()
+
+```ts
+verifyProof(proofData): Promise<boolean>
+```
+
+#### Parameters
+
+| Parameter | Type |
+| :------ | :------ |
+| `proofData` | `ProofData` |
+
+#### Returns
+
+`Promise`\<`boolean`\>
+
+#### Description
+
+Instantiates the verification key and verifies a proof.
+
+#### Example
+
+```typescript
+async verifyProof(proof)
+```
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/and.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/and.md
new file mode 100644
index 00000000000..c783283e396
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/and.md
@@ -0,0 +1,22 @@
+# and()
+
+```ts
+and(lhs, rhs): string
+```
+
+Performs a bitwise AND operation between `lhs` and `rhs`
+
+## Parameters
+
+| Parameter | Type | Description |
+| :------ | :------ | :------ |
+| `lhs` | `string` |  |
+| `rhs` | `string` |  |
+
+## Returns
+
+`string`
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/blake2s256.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/blake2s256.md
new file mode 100644
index 00000000000..7882d0da8d5
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/blake2s256.md
@@ -0,0 +1,21 @@
+# blake2s256()
+
+```ts
+blake2s256(inputs): Uint8Array
+```
+
+Calculates the Blake2s256 hash of the input bytes
+
+## Parameters
+
+| Parameter | Type | Description |
+| :------ | :------ | :------ |
+| `inputs` | `Uint8Array` |  |
+
+## Returns
+
+`Uint8Array`
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md
new file mode 100644
index 00000000000..5e3cd53e9d3
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md
@@ -0,0 +1,28 @@
+# ecdsa\_secp256k1\_verify()
+
+```ts
+ecdsa_secp256k1_verify(
+   hashed_msg,
+   public_key_x_bytes,
+   public_key_y_bytes,
+   signature): boolean
+```
+
+Verifies an ECDSA signature over the secp256k1 curve.
+
+## Parameters
+
+| Parameter | Type | Description |
+| :------ | :------ | :------ |
+| `hashed_msg` | `Uint8Array` |  |
+| `public_key_x_bytes` | `Uint8Array` |  |
+| `public_key_y_bytes` | `Uint8Array` |  |
+| `signature` | `Uint8Array` |  |
+
+## Returns
+
+`boolean`
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md
new file mode 100644
index 00000000000..0b20ff68957
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md
@@ -0,0 +1,28 @@
+# ecdsa\_secp256r1\_verify()
+
+```ts
+ecdsa_secp256r1_verify(
+   hashed_msg,
+   public_key_x_bytes,
+   public_key_y_bytes,
+   signature): boolean
+```
+
+Verifies an ECDSA signature over the secp256r1 curve.
+
+## Parameters
+
+| Parameter | Type | Description |
+| :------ | :------ | :------ |
+| `hashed_msg` | `Uint8Array` |  |
+| `public_key_x_bytes` | `Uint8Array` |  |
+| `public_key_y_bytes` | `Uint8Array` |  |
+| `signature` | `Uint8Array` |  |
+
+## Returns
+
+`boolean`
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/keccak256.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/keccak256.md
new file mode 100644
index 00000000000..d10f155ce86
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/keccak256.md
@@ -0,0 +1,21 @@
+# keccak256()
+
+```ts
+keccak256(inputs): Uint8Array
+```
+
+Calculates the Keccak256 hash of the input bytes
+
+## Parameters
+
+| Parameter | Type | Description |
+| :------ | :------ | :------ |
+| `inputs` | `Uint8Array` |  |
+
+## Returns
+
+`Uint8Array`
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/sha256.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/sha256.md
new file mode 100644
index 00000000000..6ba4ecac022
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/sha256.md
@@ -0,0 +1,21 @@
+# sha256()
+
+```ts
+sha256(inputs): Uint8Array
+```
+
+Calculates the SHA256 hash of the input bytes
+
+## Parameters
+
+| Parameter | Type | Description |
+| :------ | :------ | :------ |
+| `inputs` | `Uint8Array` |  |
+
+## Returns
+
+`Uint8Array`
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/xor.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/xor.md
new file mode 100644
index 00000000000..8d762b895d3
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/functions/xor.md
@@ -0,0 +1,22 @@
+# xor()
+
+```ts
+xor(lhs, rhs): string
+```
+
+Performs a bitwise XOR operation between `lhs` and `rhs`
+
+
+## Parameters
+
+| Parameter | Type | Description |
+| :------ | :------ | :------ |
+| `lhs` | `string` |  |
+| `rhs` | `string` |  |
+
+## Returns
+
+`string`
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/index.md
new file mode 100644
index 00000000000..cca6b3ace41
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/index.md
@@ -0,0 +1,54 @@
+# noir_js
+
+## Exports
+
+### Classes
+
+| Class | Description |
+| :------ | :------ |
+| [Noir](classes/Noir.md) | - |
+
+### Type Aliases
+
+| Type alias | Description |
+| :------ | :------ |
+| [ForeignCallHandler](type-aliases/ForeignCallHandler.md) | A callback which performs a foreign call and returns the response. |
+| [ForeignCallInput](type-aliases/ForeignCallInput.md) | - |
+| [ForeignCallOutput](type-aliases/ForeignCallOutput.md) | - |
+| [WitnessMap](type-aliases/WitnessMap.md) | - |
+
+### Functions
+
+| Function | Description |
+| :------ | :------ |
+| [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` |
+| [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes |
+| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Verifies an ECDSA signature over the secp256k1 curve. |
+| [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies an ECDSA signature over the secp256r1 curve. |
+| [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes |
+| [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes |
+| [xor](functions/xor.md) | Performs a bitwise XOR operation between `lhs` and `rhs` |
+
+## References
+
+### CompiledCircuit
+
+Renames and re-exports [InputMap](index.md#inputmap)
+
+***
+
+### ProofData
+
+Renames and re-exports [InputMap](index.md#inputmap)
+
+## Variables
+
+### InputMap
+
+```ts
+InputMap: any;
+```
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md
new file mode 100644
index 00000000000..812b8b16481
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md
@@ -0,0 +1,24 @@
+# ForeignCallHandler
+
+```ts
+type ForeignCallHandler: (name, inputs) => Promise<ForeignCallOutput[]>;
+```
+
+A callback which performs a foreign call and returns the response.
+
+## Parameters
+
+| Parameter | Type | Description |
+| :------ | :------ | :------ |
+| `name` | `string` | The identifier for the type of foreign call being performed. |
+| `inputs` | [`ForeignCallInput`](ForeignCallInput.md)[] | An array of hex encoded inputs to the foreign call. |
+
+## Returns
+
+`Promise`\<[`ForeignCallOutput`](ForeignCallOutput.md)[]\>
+
+outputs - An array of hex encoded outputs containing the results of the foreign call.
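+
+## Example
+
+A minimal sketch of a handler (the logging behavior and empty response here are illustrative assumptions; a real handler would compute outputs for the specific oracle being called):
+
+```typescript
+import { ForeignCallHandler } from '@noir-lang/noir_js';
+
+// log every foreign call and return no outputs
+const handler: ForeignCallHandler = async (name, inputs) => {
+  console.log(`foreign call: ${name}`, inputs);
+  return [];
+};
+```
+
+Such a handler can then be passed as the optional `foreignCallHandler` argument to `Noir.execute` or `Noir.generateProof`.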
+ +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md new file mode 100644 index 00000000000..dd95809186a --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md @@ -0,0 +1,9 @@ +# ForeignCallInput + +```ts +type ForeignCallInput: string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md new file mode 100644 index 00000000000..b71fb78a946 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md @@ -0,0 +1,9 @@ +# ForeignCallOutput + +```ts +type ForeignCallOutput: string | string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md new file mode 100644 index 00000000000..258c46f9d0c --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md @@ -0,0 +1,9 @@ +# WitnessMap + +```ts +type WitnessMap: Map; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs new file mode 100644 index 00000000000..c6d8125eaad --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/noir_js/classes/Noir","label":"Noir"}]},{"type":"category","label":"Type 
Aliases","items":[{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallHandler","label":"ForeignCallHandler"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallInput","label":"ForeignCallInput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallOutput","label":"ForeignCallOutput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/WitnessMap","label":"WitnessMap"}]},{"type":"category","label":"Functions","items":[{"type":"doc","id":"reference/NoirJS/noir_js/functions/and","label":"and"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/blake2s256","label":"blake2s256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify","label":"ecdsa_secp256k1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify","label":"ecdsa_secp256r1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/keccak256","label":"keccak256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/sha256","label":"sha256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/xor","label":"xor"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/.nojekyll b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/compile.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/compile.md new file mode 100644 index 00000000000..6faf763b37f --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/compile.md @@ -0,0 +1,51 @@ +# compile() + +```ts +compile( + fileManager, + projectPath?, + logFn?, +debugLogFn?): Promise +``` + +Compiles a Noir project + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `fileManager` | `FileManager` | The file manager to use | +| `projectPath`? | `string` | The path to the project inside the file manager. Defaults to the root of the file manager | +| `logFn`? | `LogFn` | A logging function. If not provided, console.log will be used | +| `debugLogFn`? | `LogFn` | A debug logging function. 
If not provided, logFn will be used | + +## Returns + +`Promise`\<[`ProgramCompilationArtifacts`](../index.md#programcompilationartifacts)\> + +## Example + +```typescript +// Node.js + +import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager(myProjectPath); +const myCompiledCode = await compile_program(fm); +``` + +```typescript +// Browser + +import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager('/'); +for (const path of files) { + await fm.writeFile(path, await getFileAsStream(path)); +} +const myCompiledCode = await compile_program(fm); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/compile_contract.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/compile_contract.md new file mode 100644 index 00000000000..7d0b39a43ef --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/compile_contract.md @@ -0,0 +1,51 @@ +# compile\_contract() + +```ts +compile_contract( + fileManager, + projectPath?, + logFn?, +debugLogFn?): Promise +``` + +Compiles a Noir project + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `fileManager` | `FileManager` | The file manager to use | +| `projectPath`? | `string` | The path to the project inside the file manager. Defaults to the root of the file manager | +| `logFn`? | `LogFn` | A logging function. If not provided, console.log will be used | +| `debugLogFn`? | `LogFn` | A debug logging function. 
If not provided, logFn will be used | + +## Returns + +`Promise`\<[`ContractCompilationArtifacts`](../index.md#contractcompilationartifacts)\> + +## Example + +```typescript +// Node.js + +import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager(myProjectPath); +const myCompiledCode = await compile_contract(fm); +``` + +```typescript +// Browser + +import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + +const fm = createFileManager('/'); +for (const path of files) { + await fm.writeFile(path, await getFileAsStream(path)); +} +const myCompiledCode = await compile_contract(fm); +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/createFileManager.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/createFileManager.md new file mode 100644 index 00000000000..7e65c1d69c7 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/createFileManager.md @@ -0,0 +1,21 @@ +# createFileManager() + +```ts +createFileManager(dataDir): FileManager +``` + +Creates a new FileManager instance based on fs in node and memfs in the browser (via webpack alias) + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `dataDir` | `string` | root of the file system | + +## Returns + +`FileManager` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md new file mode 100644 index 00000000000..fcea9275341 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/functions/inflateDebugSymbols.md @@ -0,0 +1,21 @@ +# inflateDebugSymbols() + +```ts +inflateDebugSymbols(debugSymbols): any +``` + +Decompresses and decodes the debug symbols + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `debugSymbols` | `string` | The base64 encoded debug symbols | + +## Returns + +`any` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/index.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/index.md new file mode 100644 index 00000000000..b6e0f9d1bc0 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/index.md @@ -0,0 +1,49 @@ +# noir_wasm + +## Exports + +### Functions + +| Function | Description | +| :------ | :------ | +| [compile](functions/compile.md) | Compiles a Noir project | +| [compile\_contract](functions/compile_contract.md) | Compiles a Noir project | +| [createFileManager](functions/createFileManager.md) | Creates a new FileManager instance based on fs in node and memfs in the browser (via webpack alias) | +| [inflateDebugSymbols](functions/inflateDebugSymbols.md) | Decompresses and decodes the debug symbols | + +## References + +### compile\_program + +Renames and re-exports 
[compile](functions/compile.md)

## Interfaces

### ContractCompilationArtifacts

The compilation artifacts of a given contract.

#### Properties

| Property | Type | Description |
| :------ | :------ | :------ |
| `contract` | `ContractArtifact` | The compiled contract. |
| `warnings` | `unknown`[] | Compilation warnings. |

***

### ProgramCompilationArtifacts

The compilation artifacts of a given program.

#### Properties

| Property | Type | Description |
| :------ | :------ | :------ |
| `name` | `string` | not part of the compilation output, injected later |
| `program` | `ProgramArtifact` | The compiled program. |
| `warnings` | `unknown`[] | Compilation warnings. |

***

Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs
new file mode 100644
index 00000000000..e0870710349
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/NoirJS/noir_wasm/typedoc-sidebar.cjs
@@ -0,0 +1,4 @@
// @ts-check
/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */
const typedocSidebar = { items: [{"type":"doc","id":"reference/NoirJS/noir_wasm/index","label":"API"},{"type":"category","label":"Functions","items":[{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/compile","label":"compile"},{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/compile_contract","label":"compile_contract"},{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/createFileManager","label":"createFileManager"},{"type":"doc","id":"reference/NoirJS/noir_wasm/functions/inflateDebugSymbols","label":"inflateDebugSymbols"}]}]};
module.exports = typedocSidebar.items;
\ No newline at end of file
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/_category_.json
new file mode 100644
index 00000000000..5b6a20a609a
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/_category_.json
@@ -0,0 +1,5 @@
{
  "position": 4,
  "collapsible": true,
  "collapsed": true
}
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/_category_.json b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/_category_.json
new file mode 100644
index 00000000000..27869205ad3
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/_category_.json
@@ -0,0 +1,6 @@
{
  "label": "Debugger",
  "position": 1,
  "collapsible": true,
  "collapsed": true
}
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_known_limitations.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_known_limitations.md
new file mode 100644
index 00000000000..936d416ac4b
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_known_limitations.md
@@ -0,0 +1,59 @@
---
title: Known limitations
description:
  An overview of known limitations of the current version of the Noir debugger
keywords:
  [
    Nargo,
    Noir Debugger,
    VS Code,
  ]
sidebar_position: 2
---

# Debugger Known Limitations

There are currently some limits to what the debugger can observe.
+ +## Mutable references + +The debugger is currently blind to any state mutated via a mutable reference. For example, in: + +``` +let mut x = 1; +let y = &mut x; +*y = 2; +``` + +The update on `x` will not be observed by the debugger. That means, when running `vars` from the debugger REPL, or inspecting the _local variables_ pane in the VS Code debugger, `x` will appear with value 1 despite having executed `*y = 2;`. + +## Variables of type function or mutable references are opaque + +When inspecting variables, any variable of type `Function` or `MutableReference` will render its value as `<>` or `<>`. + +## Debugger instrumentation affects resulting ACIR + +In order to make the state of local variables observable, the debugger compiles Noir circuits interleaving foreign calls that track any mutations to them. While this works (except in the cases described above) and doesn't introduce any behavior changes, it does as a side effect produce bigger bytecode. In particular, when running the command `opcodes` on the REPL debugger, you will notice Unconstrained VM blocks that look like this: + +``` +... +5 BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [], q_c: 2 }), Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(2))], q_c: 0 })] + | outputs=[] + 5.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 5.1 | Mov { destination: RegisterIndex(3), source: RegisterIndex(1) } + 5.2 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 5.3 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 5.4 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 5.5 | Mov { destination: RegisterIndex(3), source: RegisterIndex(3) } + 5.6 | Call { location: 8 } + 5.7 | Stop + 5.8 | ForeignCall { function: "__debug_var_assign", destinations: [], inputs: [RegisterIndex(RegisterIndex(2)), RegisterIndex(RegisterIndex(3))] } +... +``` + +If you are interested in debugging/inspecting compiled ACIR without these synthetic changes, you can invoke the REPL debugger with the `--skip-instrumentation` flag or launch the VS Code debugger with the `skipConfiguration` property set to true in its launch configuration. You can find more details about those in the [Debugger REPL reference](debugger_repl.md) and the [VS Code Debugger reference](debugger_vscode.md). + +:::note +Skipping debugger instrumentation means you won't be able to inspect values of local variables. +::: + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_repl.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_repl.md new file mode 100644 index 00000000000..46e2011304e --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_repl.md @@ -0,0 +1,360 @@ +--- +title: REPL Debugger +description: + Noir Debugger REPL options and commands. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + REPL, + ] +sidebar_position: 1 +--- + +## Running the REPL debugger + +`nargo debug [OPTIONS] [WITNESS_NAME]` + +Runs the Noir REPL debugger. If a `WITNESS_NAME` is provided the debugger writes the resulting execution witness to a `WITNESS_NAME` file. 
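For example, `nargo debug debug_witness` would start a debugging session and, once execution completes, save the witness to a file named `debug_witness` (the witness file name here is just an illustration).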
+ +### Options + +| Option | Description | +| --------------------- | ------------------------------------------------------------ | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover]| +| `--package ` | The name of the package to debug | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +None of these options are required. + +:::note +Since the debugger starts by compiling the target package, all Noir compiler options are also available. Check out the [compiler reference](../nargo_commands.md#nargo-compile) to learn more about the compiler options. +::: + +## REPL commands + +Once the debugger is running, it accepts the following commands. + +#### `help` (h) + +Displays the menu of available commands. + +``` +> help +Available commands: + + opcodes display ACIR opcodes + into step into to the next opcode + next step until a new source location is reached + out step until a new source location is reached + and the current stack frame is finished + break LOCATION:OpcodeLocation add a breakpoint at an opcode location + over step until a new source location is reached + without diving into function calls + restart restart the debugging session + delete LOCATION:OpcodeLocation delete breakpoint at an opcode location + witness show witness map + witness index:u32 display a single witness from the witness map + witness index:u32 value:String update a witness with the given value + memset index:usize value:String update a memory cell with the given + value + continue continue execution until the end of the + program + vars show variable values available at this point + in execution + stacktrace display the current stack trace + memory show memory (valid when executing unconstrained code) value + step step to the next ACIR opcode + +Other commands: + + help Show this help message + quit Quit repl + +``` + +### Stepping through programs + +#### `next` (n) + +Step until the next Noir source code location. While other commands, such as [`into`](#into-i) and [`step`](#step-s), allow for finer grained control of the program's execution at the opcode level, `next` is source code centric. For example: + +``` +3 ... +4 fn main(x: u32) { +5 assert(entry_point(x) == 2); +6 swap_entry_point(x, x + 1); +7 -> assert(deep_entry_point(x) == 4); +8 multiple_values_entry_point(x); +9 } +``` + + +Using `next` here would cause the debugger to jump to the definition of `deep_entry_point` (if available). + +If you want to step over `deep_entry_point` and go straight to line 8, use [the `over` command](#over) instead. + +#### `over` + +Step until the next source code location, without diving into function calls. For example: + +``` +3 ... +4 fn main(x: u32) { +5 assert(entry_point(x) == 2); +6 swap_entry_point(x, x + 1); +7 -> assert(deep_entry_point(x) == 4); +8 multiple_values_entry_point(x); +9 } +``` + + +Using `over` here would cause the debugger to execute until line 8 (`multiple_values_entry_point(x);`). + +If you want to step into `deep_entry_point` instead, use [the `next` command](#next-n). + +#### `out` + +Step until the end of the current function call. For example: + +``` + 3 ... 
+ 4 fn main(x: u32) { + 5 assert(entry_point(x) == 2); + 6 swap_entry_point(x, x + 1); + 7 -> assert(deep_entry_point(x) == 4); + 8 multiple_values_entry_point(x); + 9 } + 10 + 11 unconstrained fn returns_multiple_values(x: u32) -> (u32, u32, u32, u32) { + 12 ... + ... + 55 + 56 unconstrained fn deep_entry_point(x: u32) -> u32 { + 57 -> level_1(x + 1) + 58 } + +``` + +Running `out` here will resume execution until line 8. + +#### `step` (s) + +Skips to the next ACIR code. A compiled Noir program is a sequence of ACIR opcodes. However, an unconstrained VM opcode denotes the start of an unconstrained code block, to be executed by the unconstrained VM. For example (redacted for brevity): + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +The `->` here shows the debugger paused at an ACIR opcode: `BRILLIG`, at index 1, which denotes an unconstrained code block is about to start. + +Using the `step` command at this point would result in the debugger stopping at ACIR opcode 2, `EXPR`, skipping unconstrained computation steps. + +Use [the `into` command](#into-i) instead if you want to follow unconstrained computation step by step. + +#### `into` (i) + +Steps into the next opcode. A compiled Noir program is a sequence of ACIR opcodes. However, a BRILLIG opcode denotes the start of an unconstrained code block, to be executed by the unconstrained VM. For example (redacted for brevity): + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +The `->` here shows the debugger paused at an ACIR opcode: `BRILLIG`, at index 1, which denotes an unconstrained code block is about to start. + +Using the `into` command at this point would result in the debugger stopping at opcode 1.0, `Mov ...`, allowing the debugger user to follow unconstrained computation step by step. + +Use [the `step` command](#step-s) instead if you want to skip to the next ACIR code directly. + +#### `continue` (c) + +Continues execution until the next breakpoint, or the end of the program. + +#### `restart` (res) + +Interrupts execution, and restarts a new debugging session from scratch. + +#### `opcodes` (o) + +Display the program's ACIR opcode sequence. 
For example: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +### Breakpoints + +#### `break [Opcode]` (or shorthand `b [Opcode]`) + +Sets a breakpoint on the specified opcode index. To get a list of the program opcode numbers, see [the `opcode` command](#opcodes-o). For example: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +In this example, issuing a `break 1.2` command adds break on opcode 1.2, as denoted by the `*` character: + +``` +0 BLACKBOX::RANGE [(_0, num_bits: 32)] [ ] +1 -> BRILLIG inputs=[Single(Expression { mul_terms: [], linear_combinations: [(1, Witness(0))], q_c: 0 })] outputs=[Simple(Witness(1))] + 1.0 | Mov { destination: RegisterIndex(2), source: RegisterIndex(0) } + 1.1 | Const { destination: RegisterIndex(0), value: Value { inner: 0 } } + 1.2 | * Const { destination: RegisterIndex(1), value: Value { inner: 0 } } + 1.3 | Mov { destination: RegisterIndex(2), source: RegisterIndex(2) } + 1.4 | Call { location: 7 } + ... + 1.43 | Return +2 EXPR [ (1, _1) -2 ] +``` + +Running [the `continue` command](#continue-c) at this point would cause the debugger to execute the program until opcode 1.2. + +#### `delete [Opcode]` (or shorthand `d [Opcode]`) + +Deletes a breakpoint at an opcode location. Usage is analogous to [the `break` command](#). + +### Variable inspection + +#### vars + +Show variable values available at this point in execution. + +:::note +The ability to inspect variable values from the debugger depends on compilation to be run in a special debug instrumentation mode. This instrumentation weaves variable tracing code with the original source code. + +So variable value inspection comes at the expense of making the resulting ACIR bytecode bigger and harder to understand and optimize. + +If you find this compromise unacceptable, you can run the debugger with the flag `--skip-debug-instrumentation`. This will compile your circuit without any additional debug information, so the resulting ACIR bytecode will be identical to the one produced by standard Noir compilation. However, if you opt for this, the `vars` command will not be available while debugging. +::: + + +### Stacktrace + +#### `stacktrace` + +Displays the current stack trace. + + +### Witness map + +#### `witness` (w) + +Show witness map. For example: + +``` +_0 = 0 +_1 = 2 +_2 = 1 +``` + +#### `witness [Witness Index]` + +Display a single witness from the witness map. 
For example: + +``` +> witness 1 +_1 = 2 +``` + +#### `witness [Witness Index] [New value]` + +Overwrite the given index with a new value. For example: + +``` +> witness 1 3 +_1 = 3 +``` + + +### Unconstrained VM memory + +#### `memory` + +Show unconstrained VM memory state. For example: + +``` +> memory +At opcode 1.13: Store { destination_pointer: RegisterIndex(0), source: RegisterIndex(3) } +... +> registers +0 = 0 +1 = 10 +2 = 0 +3 = 1 +4 = 1 +5 = 2³² +6 = 1 +> into +At opcode 1.14: Const { destination: RegisterIndex(5), value: Value { inner: 1 } } +... +> memory +0 = 1 +> +``` + +In the example above: we start with clean memory, then step through a `Store` opcode which stores the value of register 3 (1) into the memory address stored in register 0 (0). Thus now `memory` shows memory address 0 contains value 1. + +:::note +This command is only functional while the debugger is executing unconstrained code. +::: + +#### `memset [Memory address] [New value]` + +Update a memory cell with the given value. For example: + +``` +> memory +0 = 1 +> memset 0 2 +> memory +0 = 2 +> memset 1 4 +> memory +0 = 2 +1 = 4 +> +``` + +:::note +This command is only functional while the debugger is executing unconstrained code. +::: \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_vscode.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_vscode.md new file mode 100644 index 00000000000..c027332b3b0 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/debugger/debugger_vscode.md @@ -0,0 +1,82 @@ +--- +title: VS Code Debugger +description: + VS Code Debugger configuration and features. +keywords: + [ + Nargo, + Noir CLI, + Noir Debugger, + VS Code, + IDE, + ] +sidebar_position: 0 +--- + +# VS Code Noir Debugger Reference + +The Noir debugger enabled by the vscode-noir extension ships with default settings such that the most common scenario should run without any additional configuration steps. + +These defaults can nevertheless be overridden by defining a launch configuration file. This page provides a reference for the properties you can override via a launch configuration file, as well as documenting the Nargo `dap` command, which is a dependency of the VS Code Noir debugger. + + +## Creating and editing launch configuration files + +To create a launch configuration file from VS Code, open the _debug pane_, and click on _create a launch.json file_. + +![Creating a launch configuration file](@site/static/img/debugger/ref1-create-launch.png) + +A `launch.json` file will be created, populated with basic defaults. + +### Noir Debugger launch.json properties + +#### projectFolder + +_String, optional._ + +Absolute path to the Nargo project to debug. By default, it is dynamically determined by looking for the nearest `Nargo.toml` file to the active file at the moment of launching the debugger. + +#### proverName + +_String, optional._ + +Name of the prover input to use. Defaults to `Prover`, which looks for a file named `Prover.toml` at the `projectFolder`. + +#### generateAcir + +_Boolean, optional._ + +If true, generate ACIR opcodes instead of unconstrained opcodes which will be closer to release binaries but less convenient for debugging. Defaults to `false`. + +#### skipInstrumentation + +_Boolean, optional._ + +Skips variables debugging instrumentation of code, making debugging less convenient but the resulting binary smaller and closer to production. Defaults to `false`. 
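Putting the four properties together, a complete configuration might look like the sketch below. The `type`, `request` and `name` fields are standard VS Code launch attributes rather than part of this reference, and the values shown for them are assumptions based on the defaults that _create a launch.json file_ generates; the remaining values are illustrative:

```json
{
  "version": "0.2.0",
  "configurations": [
    {
      "type": "noir",
      "request": "launch",
      "name": "Debug Noir package",
      "projectFolder": "${workspaceFolder}",
      "proverName": "Prover",
      "generateAcir": false,
      "skipInstrumentation": false
    }
  ]
}
```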
+ +:::note +Skipping instrumentation causes the debugger to be unable to inspect local variables. +::: + +## `nargo dap [OPTIONS]` + +When run without any option flags, it starts the Nargo Debug Adapter Protocol server, which acts as the debugging backend for the VS Code Noir Debugger. + +All option flags are related to preflight checks. The Debug Adapter Protocol specifies how errors are to be informed from a running DAP server, but it doesn't specify mechanisms to communicate server initialization errors between the DAP server and its client IDE. + +Thus `nargo dap` ships with a _preflight check_ mode. If flag `--preflight-check` and the rest of the `--preflight-*` flags are provided, Nargo will run the same initialization routine except it will not start the DAP server. + +`vscode-noir` will then run `nargo dap` in preflight check mode first before a debugging session starts. If the preflight check ends in error, vscode-noir will present stderr and stdout output from this process through its own Output pane in VS Code. This makes it possible for users to diagnose what pieces of configuration might be wrong or missing in case of initialization errors. + +If the preflight check succeeds, `vscode-noir` proceeds to start the DAP server normally but running `nargo dap` without any additional flags. + +### Options + +| Option | Description | +| --------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- | +| `--preflight-check` | If present, dap runs in preflight check mode. | +| `--preflight-project-folder ` | Absolute path to the project to debug for preflight check. | +| `--preflight-prover-name ` | Name of prover file to use for preflight check | +| `--preflight-generate-acir` | Optional. If present, compile in ACIR mode while running preflight check. | +| `--preflight-skip-instrumentation` | Optional. If present, compile without introducing debug instrumentation while running preflight check. | +| `-h, --help` | Print help. | diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/nargo_commands.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/nargo_commands.md new file mode 100644 index 00000000000..218fcfb0c8c --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/reference/nargo_commands.md @@ -0,0 +1,381 @@ +--- +title: Nargo +description: + Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. +keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +sidebar_position: 0 +--- + +# Command-Line Help for `nargo` + +This document contains the help content for the `nargo` command-line program. 
+ +**Command Overview:** + +* [`nargo`↴](#nargo) +* [`nargo backend`↴](#nargo-backend) +* [`nargo backend current`↴](#nargo-backend-current) +* [`nargo backend ls`↴](#nargo-backend-ls) +* [`nargo backend use`↴](#nargo-backend-use) +* [`nargo backend install`↴](#nargo-backend-install) +* [`nargo backend uninstall`↴](#nargo-backend-uninstall) +* [`nargo check`↴](#nargo-check) +* [`nargo fmt`↴](#nargo-fmt) +* [`nargo codegen-verifier`↴](#nargo-codegen-verifier) +* [`nargo compile`↴](#nargo-compile) +* [`nargo new`↴](#nargo-new) +* [`nargo init`↴](#nargo-init) +* [`nargo execute`↴](#nargo-execute) +* [`nargo prove`↴](#nargo-prove) +* [`nargo verify`↴](#nargo-verify) +* [`nargo test`↴](#nargo-test) +* [`nargo info`↴](#nargo-info) +* [`nargo lsp`↴](#nargo-lsp) + +## `nargo` + +Noir's package manager + +**Usage:** `nargo ` + +###### **Subcommands:** + +* `backend` — Install and select custom backends used to generate and verify proofs +* `check` — Checks the constraint system for errors +* `fmt` — Format the Noir files in a workspace +* `codegen-verifier` — Generates a Solidity verifier smart contract for the program +* `compile` — Compile the program and its secret execution trace into ACIR format +* `new` — Create a Noir project in a new directory +* `init` — Create a Noir project in the current directory +* `execute` — Executes a circuit to calculate its return value +* `prove` — Create proof for this program. The proof is returned as a hex encoded string +* `verify` — Given a proof and a program, verify whether the proof is valid +* `test` — Run the tests for this program +* `info` — Provides detailed information on each of a program's function (represented by a single circuit) +* `lsp` — Starts the Noir LSP server + +###### **Options:** + + + + +## `nargo backend` + +Install and select custom backends used to generate and verify proofs + +**Usage:** `nargo backend ` + +###### **Subcommands:** + +* `current` — Prints the name of the currently active backend +* `ls` — Prints the list of currently installed backends +* `use` — Select the backend to use +* `install` — Install a new backend from a URL +* `uninstall` — Uninstalls a backend + + + +## `nargo backend current` + +Prints the name of the currently active backend + +**Usage:** `nargo backend current` + + + +## `nargo backend ls` + +Prints the list of currently installed backends + +**Usage:** `nargo backend ls` + + + +## `nargo backend use` + +Select the backend to use + +**Usage:** `nargo backend use ` + +###### **Arguments:** + +* `` + + + +## `nargo backend install` + +Install a new backend from a URL + +**Usage:** `nargo backend install ` + +###### **Arguments:** + +* `` — The name of the backend to install +* `` — The URL from which to download the backend + + + +## `nargo backend uninstall` + +Uninstalls a backend + +**Usage:** `nargo backend uninstall ` + +###### **Arguments:** + +* `` — The name of the backend to uninstall + + + +## `nargo check` + +Checks the constraint system for errors + +**Usage:** `nargo check [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to check +* `--workspace` — Check all packages in the workspace +* `--overwrite` — Force overwrite of existing files +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo fmt` + +Format the Noir files in a 
workspace + +**Usage:** `nargo fmt [OPTIONS]` + +###### **Options:** + +* `--check` — Run noirfmt in check mode + + + +## `nargo codegen-verifier` + +Generates a Solidity verifier smart contract for the program + +**Usage:** `nargo codegen-verifier [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to codegen +* `--workspace` — Codegen all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo compile` + +Compile the program and its secret execution trace into ACIR format + +**Usage:** `nargo compile [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to compile +* `--workspace` — Compile all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo new` + +Create a Noir project in a new directory + +**Usage:** `nargo new [OPTIONS] ` + +###### **Arguments:** + +* `` — The path to save the new project + +###### **Options:** + +* `--name ` — Name of the package [default: package directory name] +* `--lib` — Use a library template +* `--bin` — Use a binary template [default] +* `--contract` — Use a contract template + + + +## `nargo init` + +Create a Noir project in the current directory + +**Usage:** `nargo init [OPTIONS]` + +###### **Options:** + +* `--name ` — Name of the package [default: current directory name] +* `--lib` — Use a library template +* `--bin` — Use a binary template [default] +* `--contract` — Use a contract template + + + +## `nargo execute` + +Executes a circuit to calculate its return value + +**Usage:** `nargo execute [OPTIONS] [WITNESS_NAME]` + +###### **Arguments:** + +* `` — Write the execution witness to named file + +###### **Options:** + +* `-p`, `--prover-name ` — The name of the toml file which contains the inputs for the prover + + Default value: `Prover` +* `--package ` — The name of the package to execute +* `--workspace` — Execute all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--oracle-resolver ` — JSON RPC url to solve oracle calls + + + +## `nargo prove` + +Create proof for this program. 
The proof is returned as a hex encoded string + +**Usage:** `nargo prove [OPTIONS]` + +###### **Options:** + +* `-p`, `--prover-name ` — The name of the toml file which contains the inputs for the prover + + Default value: `Prover` +* `-v`, `--verifier-name ` — The name of the toml file which contains the inputs for the verifier + + Default value: `Verifier` +* `--verify` — Verify proof after proving +* `--package ` — The name of the package to prove +* `--workspace` — Prove all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--oracle-resolver ` — JSON RPC url to solve oracle calls + + + +## `nargo verify` + +Given a proof and a program, verify whether the proof is valid + +**Usage:** `nargo verify [OPTIONS]` + +###### **Options:** + +* `-v`, `--verifier-name ` — The name of the toml file which contains the inputs for the verifier + + Default value: `Verifier` +* `--package ` — The name of the package verify +* `--workspace` — Verify all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo test` + +Run the tests for this program + +**Usage:** `nargo test [OPTIONS] [TEST_NAME]` + +###### **Arguments:** + +* `` — If given, only tests with names containing this string will be run + +###### **Options:** + +* `--show-output` — Display output of `println` statements +* `--exact` — Only run tests that match exactly +* `--package ` — The name of the package to test +* `--workspace` — Test all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings +* `--oracle-resolver ` — JSON RPC url to solve oracle calls + + + +## `nargo info` + +Provides detailed information on each of a program's function (represented by a single circuit) + +Current information provided per circuit: 1. The number of ACIR opcodes 2. Counts the final number gates in the circuit used by a backend + +**Usage:** `nargo info [OPTIONS]` + +###### **Options:** + +* `--package ` — The name of the package to detail +* `--workspace` — Detail all packages in the workspace +* `--expression-width ` — Override the expression width requested by the backend +* `--force` — Force a full recompilation +* `--print-acir` — Display the ACIR for compiled circuit +* `--deny-warnings` — Treat all warnings as errors +* `--silence-warnings` — Suppress warnings + + + +## `nargo lsp` + +Starts the Noir LSP server + +Starts an LSP server which allows IDEs such as VS Code to display diagnostics in Noir source. + +VS Code Noir Language Support: https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir + +**Usage:** `nargo lsp` + + + +
+ + + This document was generated automatically by + clap-markdown. + + diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/debugger.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/debugger.md new file mode 100644 index 00000000000..184c436068f --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/debugger.md @@ -0,0 +1,27 @@ +--- +title: Debugger +description: Learn about the Noir Debugger, in its REPL or VS Code versions. +keywords: [Nargo, VSCode, Visual Studio Code, REPL, Debugger] +sidebar_position: 2 +--- + +# Noir Debugger + +There are currently two ways of debugging Noir programs: + +1. From VS Code, via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). +2. Via the REPL debugger, which ships with Nargo. + +In order to use either version of the debugger, you will need to install recent enough versions of Noir, [Nargo](../getting_started/installation) and vscode-noir: + +- Noir 0.xx +- Nargo 0.xx +- vscode-noir 0.xx + +:::info +At the moment, the debugger supports debugging binary projects, but not contracts. +::: + +We cover the VS Code Noir debugger more in depth in [its VS Code debugger how-to guide](../how_to/debugger/debugging_with_vs_code.md) and [the reference](../reference/debugger/debugger_vscode.md). + +The REPL debugger is discussed at length in [the REPL debugger how-to guide](../how_to/debugger/debugging_with_the_repl.md) and [the reference](../reference/debugger/debugger_repl.md). diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/language_server.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/language_server.md new file mode 100644 index 00000000000..81e0356ef8a --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/language_server.md @@ -0,0 +1,43 @@ +--- +title: Language Server +description: Learn about the Noir Language Server, how to install the components, and configuration that may be required. +keywords: [Nargo, Language Server, LSP, VSCode, Visual Studio Code] +sidebar_position: 0 +--- + +This section helps you install and configure the Noir Language Server. + +The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. + +## Language Server + +The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. +As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! + +If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. + +## Language Client + +The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. + +Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). 
+ +> **Note:** Noir's Language Server Protocol support currently assumes users' VSCode workspace root to be the same as users' Noir project root (i.e. where Nargo.toml lies). +> +> If LSP features seem to be missing / malfunctioning, make sure you are opening your Noir project directly (instead of as a sub-folder) in your VSCode instance. + +When your language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, measuring circuit size, execution, and tests: + +![Compile and Execute](@site/static/img/codelens_compile_execute.png) +![Run test](@site/static/img/codelens_run_test.png) + +You should also see your tests in the `testing` panel: + +![Testing panel](@site/static/img/codelens_testing_panel.png) + +### Configuration + +- **Noir: Enable LSP** - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. +- **Noir: Nargo Flags** - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. +- **Noir: Nargo Path** - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. +- **Noir > Trace: Server** - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/testing.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/testing.md new file mode 100644 index 00000000000..d3e0c522473 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tooling/testing.md @@ -0,0 +1,62 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +sidebar_position: 1 +--- + +You can test your Noir programs using Noir circuits. + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. + +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. + +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. 
For example: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test(should_fail)] +fn test_add() { + assert(add(2,2) == 5); +} +``` + +You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] +fn test_bridgekeeper() { + main(32); +} + +``` diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tutorials/noirjs_app.md b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tutorials/noirjs_app.md new file mode 100644 index 00000000000..6446e0b2a76 --- /dev/null +++ b/noir/noir-repo/docs/versioned_docs/version-v0.28.0/tutorials/noirjs_app.md @@ -0,0 +1,326 @@ +--- +title: Building a web app with NoirJS +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment. +keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs, app] +sidebar_position: 0 +pagination_next: noir/concepts/data_types/index +--- + +NoirJS is a set of packages meant to work both in a browser and a server environment. In this tutorial, we will build a simple web app using them. From here, you should get an idea on how to proceed with your own Noir projects! + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Setup + +:::note + +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.27.x matches `noir_js@0.27.x`, etc. + +In this guide, we will be pinned to 0.27.0. + +::: + +Before we start, we want to make sure we have Node and Nargo installed. + +We start by opening a terminal and executing `node --version`. If we don't get an output like `v20.10.0`, that means node is not installed. Let's do that by following the handy [nvm guide](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script). + +As for `Nargo`, we can follow the the [Nargo guide](../getting_started/installation/index.md) to install it. If you're lazy, just paste this on a terminal and run `noirup`: + +```sh +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Easy enough. Onwards! + +## Our project + +ZK is a powerful technology. An app that doesn't reveal one of the inputs to _anyone_ is almost unbelievable, yet Noir makes it as easy as a single line of code. + +In fact, it's so simple that it comes nicely packaged in `nargo`. Let's do that! + +### Nargo + +Run: + +`nargo new circuit` + +And... That's about it. Your program is ready to be compiled and run. + +To compile, let's `cd` into the `circuit` folder to enter our project, and call: + +`nargo compile` + +This compiles our circuit into `json` format and add it to a new `target` folder. + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit <---- our working directory + ├── Nargo.toml + ├── src + │ └── main.nr + └── target + └── circuit.json +``` + +::: + +### Node and Vite + +If you want to explore Nargo, feel free to go on a side-quest now and follow the steps in the +[getting started](../getting_started/hello_noir/index.md) guide. 
However, we want our app to run on the browser, so we need Vite.

Vite is a powerful tool to generate static websites. While it provides all kinds of features, let's just go barebones with some good old vanilla JS.

To do this, go back to the previous folder (`cd ..`) and create a new vite project by running `npm create vite` and choosing "Vanilla" and "Javascript".

A wild `vite-project` directory should now appear in your root folder! Let's not waste any time and dive right in:

```bash
cd vite-project
```

### Setting Up Vite and Configuring the Project

Before we proceed with any coding, let's get our environment tailored for Noir. We'll start by laying down the foundations with a `vite.config.js` file. This little piece of configuration is our secret sauce for making sure everything meshes well with the NoirJS libraries and other special setups we might need, like handling WebAssembly modules. Here’s how you get that going:

#### Creating the vite.config.js

In your freshly minted `vite-project` folder, create a new file named `vite.config.js` and open it in your code editor. Paste the following to set the stage:

```javascript
import { defineConfig } from "vite";
import copy from "rollup-plugin-copy";

export default defineConfig({
  esbuild: {
    target: "esnext",
  },
  optimizeDeps: {
    esbuildOptions: {
      target: "esnext",
    },
  },
  plugins: [
    copy({
      targets: [
        { src: "node_modules/**/*.wasm", dest: "node_modules/.vite/dist" },
      ],
      copySync: true,
      hook: "buildStart",
    }),
  ],
  server: {
    port: 3000,
  },
});
```

#### Install Dependencies

Now that our stage is set, install the necessary NoirJS packages along with our other dependencies:

```bash
npm install && npm install @noir-lang/backend_barretenberg@0.27.0 @noir-lang/noir_js@0.27.0
npm install rollup-plugin-copy --save-dev
```

:::info

At this point in the tutorial, your folder structure should look like this:

```tree
.
└── circuit
    └── ...etc...
└── vite-project <---- our working directory
    └── ...etc...
```

:::

#### Some cleanup

`npx create vite` is amazing but it creates a bunch of files we don't really need for our simple example. Actually, let's just delete everything except for `vite.config.js`, `index.html`, `main.js` and `package.json`. I feel lighter already.

![my heart is ready for you, noir.js](@site/static/img/memes/titanic.jpeg)

## HTML

Our app won't run like this, of course. We need some working HTML, at least. Let's open our broken-hearted `index.html` and replace everything with this code snippet:

```html
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Noir app</title>
  </head>
  <body>
    <h1>Noir app</h1>
    <div class="input-area">
      <input id="guessInput" type="number" placeholder="Enter your guess" />
      <button id="submitGuess">Submit Guess</button>
    </div>
    <div class="outer">
      <div id="logs" class="inner"><h2>Logs</h2></div>
      <div id="results" class="inner"><h2>Proof</h2></div>
    </div>
  </body>
</html>
+ + +``` + +It _could_ be a beautiful UI... Depending on which universe you live in. + +## Some good old vanilla Javascript + +Our love for Noir needs undivided attention, so let's just open `main.js` and delete everything (this is where the romantic scenery becomes a bit creepy). + +Start by pasting in this boilerplate code: + +```js +const setup = async () => { + await Promise.all([ + import('@noir-lang/noirc_abi').then((module) => + module.default(new URL('@noir-lang/noirc_abi/web/noirc_abi_wasm_bg.wasm', import.meta.url).toString()), + ), + import('@noir-lang/acvm_js').then((module) => + module.default(new URL('@noir-lang/acvm_js/web/acvm_js_bg.wasm', import.meta.url).toString()), + ), + ]); +}; + +function display(container, msg) { + const c = document.getElementById(container); + const p = document.createElement('p'); + p.textContent = msg; + c.appendChild(p); +} + +document.getElementById('submitGuess').addEventListener('click', async () => { + try { + // here's where love happens + } catch (err) { + display('logs', 'Oh 💔 Wrong guess'); + } +}); +``` + +The display function doesn't do much. We're simply manipulating our website to see stuff happening. For example, if the proof fails, it will simply log a broken heart 😢 + +As for the `setup` function, it's just a sad reminder that dealing with `wasm` on the browser is not as easy as it should. Just copy, paste, and forget. + +:::info + +At this point in the tutorial, your folder structure should look like this: + +```tree +. +└── circuit + └── ...same as above +└── vite-project + ├── vite.config.js + ├── main.js + ├── package.json + └── index.html +``` + +You'll see other files and folders showing up (like `package-lock.json`, `node_modules`) but you shouldn't have to care about those. + +::: + +## Some NoirJS + +We're starting with the good stuff now. If you've compiled the circuit as described above, you should have a `json` file we want to import at the very top of our `main.js` file: + +```ts +import circuit from '../circuit/target/circuit.json'; +``` + +[Noir is backend-agnostic](../index.mdx#whats-new-about-noir). We write Noir, but we also need a proving backend. That's why we need to import and instantiate the two dependencies we installed above: `BarretenbergBackend` and `Noir`. Let's import them right below: + +```js +import { BarretenbergBackend, BarretenbergVerifier as Verifier } from '@noir-lang/backend_barretenberg'; +import { Noir } from '@noir-lang/noir_js'; +``` + +And instantiate them inside our try-catch block: + +```ts +// try { +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit, backend); +// } +``` + +:::note + +For the remainder of the tutorial, everything will be happening inside the `try` block + +::: + +## Our app + +Now for the app itself. We're capturing whatever is in the input when people press the submit button. Just add this: + +```js +const x = parseInt(document.getElementById('guessInput').value); +const input = { x, y: 2 }; +``` + +Now we're ready to prove stuff! Let's feed some inputs to our circuit and calculate the proof: + +```js +await setup(); // let's squeeze our wasm inits here + +display('logs', 'Generating proof... ⌛'); +const proof = await noir.generateProof(input); +display('logs', 'Generating proof... ✅'); +display('results', proof.proof); +``` + +You're probably eager to see stuff happening, so go and run your app now! + +From your terminal, run `npm run dev`. If it doesn't open a browser for you, just visit `localhost:5173`. 
+You should now see the worst UI ever, with an ugly input.
+
+![Getting Started 0](@site/static/img/noir_getting_started_1.png)
+
+Now, our circuit says `fn main(x: Field, y: pub Field)`. This means only the `y` value is public, and it's hardcoded above: `input = { x, y: 2 }`. In other words, you won't need to send your secret `x` to the verifier!
+
+By inputting any number other than 2 in the input box and clicking "submit", you should get a valid proof. Otherwise the proof won't even generate correctly. By the way, if you're human, you shouldn't be able to understand anything in the "proof" box. That's OK. We like you, human ❤️.
+
+## Verifying
+
+Time to celebrate, yes! But we shouldn't trust machines so blindly. Let's add these lines to see our proof being verified:
+
+```js
+display('logs', 'Verifying proof... ⌛');
+const verificationKey = await backend.getVerificationKey();
+const verifier = new Verifier();
+const isValid = await verifier.verifyProof(proof, verificationKey);
+if (isValid) display('logs', 'Verifying proof... ✅');
+```
+
+You have successfully generated a client-side Noir web app!
+
+![coded app without math knowledge](@site/static/img/memes/flextape.jpeg)
+
+## Further Reading
+
+You can see how NoirJS is used in a full-stack Next.js and Hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from NoirJS.
+
+You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps.
diff --git a/noir/noir-repo/docs/versioned_sidebars/version-v0.28.0-sidebars.json b/noir/noir-repo/docs/versioned_sidebars/version-v0.28.0-sidebars.json
new file mode 100644
index 00000000000..b9ad026f69f
--- /dev/null
+++ b/noir/noir-repo/docs/versioned_sidebars/version-v0.28.0-sidebars.json
@@ -0,0 +1,93 @@
+{
+  "sidebar": [
+    {
+      "type": "doc",
+      "id": "index"
+    },
+    {
+      "type": "category",
+      "label": "Getting Started",
+      "items": [
+        {
+          "type": "autogenerated",
+          "dirName": "getting_started"
+        }
+      ]
+    },
+    {
+      "type": "category",
+      "label": "The Noir Language",
+      "items": [
+        {
+          "type": "autogenerated",
+          "dirName": "noir"
+        }
+      ]
+    },
+    {
+      "type": "html",
+      "value": "<hr/>",
", + "defaultStyle": true + }, + { + "type": "category", + "label": "How To Guides", + "items": [ + { + "type": "autogenerated", + "dirName": "how_to" + } + ] + }, + { + "type": "category", + "label": "Explainers", + "items": [ + { + "type": "autogenerated", + "dirName": "explainers" + } + ] + }, + { + "type": "category", + "label": "Tutorials", + "items": [ + { + "type": "autogenerated", + "dirName": "tutorials" + } + ] + }, + { + "type": "category", + "label": "Reference", + "items": [ + { + "type": "autogenerated", + "dirName": "reference" + } + ] + }, + { + "type": "category", + "label": "Tooling", + "items": [ + { + "type": "autogenerated", + "dirName": "tooling" + } + ] + }, + { + "type": "html", + "value": "
", + "defaultStyle": true + }, + { + "type": "doc", + "id": "migration_notes", + "label": "Migration notes" + } + ] +} diff --git a/noir/noir-repo/test_programs/execution_success/regression_4383/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_4383/Nargo.toml new file mode 100644 index 00000000000..2ad8c208f24 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_4383/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_4383" +type = "bin" +authors = [""] +compiler_version = ">=0.26.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_4383/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_4383/src/main.nr new file mode 100644 index 00000000000..fec63ea64a5 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_4383/src/main.nr @@ -0,0 +1,3 @@ +fn main() { + assert([[1]] == [[1]]); +} diff --git a/noir/noir-repo/tooling/debugger/src/context.rs b/noir/noir-repo/tooling/debugger/src/context.rs index 9b535075484..a423016eacf 100644 --- a/noir/noir-repo/tooling/debugger/src/context.rs +++ b/noir/noir-repo/tooling/debugger/src/context.rs @@ -34,6 +34,7 @@ pub(super) struct DebugContext<'a, B: BlackBoxFunctionSolver> { debug_artifact: &'a DebugArtifact, breakpoints: HashSet, source_to_opcodes: BTreeMap>, + unconstrained_functions: &'a [BrilligBytecode], } impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { @@ -59,6 +60,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { debug_artifact, breakpoints: HashSet::new(), source_to_opcodes, + unconstrained_functions, } } @@ -215,7 +217,9 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.get_opcodes() .iter() .map(|opcode| match opcode { - Opcode::Brillig(brillig_block) => brillig_block.bytecode.len(), + Opcode::BrilligCall { id, .. } => { + self.unconstrained_functions[*id as usize].bytecode.len() + } _ => 1, }) .collect() @@ -296,19 +300,22 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { None => String::from("invalid"), Some(OpcodeLocation::Acir(acir_index)) => { let opcode = &opcodes[*acir_index]; - if let Opcode::Brillig(ref brillig) = opcode { - let first_opcode = &brillig.bytecode[0]; - format!("BRILLIG {first_opcode:?}") - } else { - format!("{opcode:?}") + match opcode { + Opcode::BrilligCall { id, .. } => { + let first_opcode = &self.unconstrained_functions[*id as usize].bytecode[0]; + format!("BRILLIG CALL {first_opcode:?}") + } + _ => format!("{opcode:?}"), } } Some(OpcodeLocation::Brillig { acir_index, brillig_index }) => { - if let Opcode::Brillig(ref brillig) = opcodes[*acir_index] { - let opcode = &brillig.bytecode[*brillig_index]; - format!(" | {opcode:?}") - } else { - String::from(" | invalid") + match &opcodes[*acir_index] { + Opcode::BrilligCall { id, .. 
} => { + let bytecode = &self.unconstrained_functions[*id as usize].bytecode; + let opcode = &bytecode[*brillig_index]; + format!(" | {opcode:?}") + } + _ => String::from(" | invalid"), } } } @@ -400,7 +407,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { return self.step_brillig_opcode(); } - match self.acvm.step_into_brillig_opcode() { + match self.acvm.step_into_brillig() { StepResult::IntoBrillig(solver) => { self.brillig_solver = Some(solver); self.step_brillig_opcode() @@ -409,20 +416,6 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } - fn currently_executing_brillig(&self) -> bool { - if self.brillig_solver.is_some() { - return true; - } - - match self.get_current_opcode_location() { - Some(OpcodeLocation::Brillig { .. }) => true, - Some(OpcodeLocation::Acir(acir_index)) => { - matches!(self.get_opcodes()[acir_index], Opcode::Brillig(_)) - } - _ => false, - } - } - fn get_current_acir_index(&self) -> Option { self.get_current_opcode_location().map(|opcode_location| match opcode_location { OpcodeLocation::Acir(acir_index) => acir_index, @@ -446,8 +439,22 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } + pub(super) fn is_executing_brillig(&self) -> bool { + if self.brillig_solver.is_some() { + return true; + } + + match self.get_current_opcode_location() { + Some(OpcodeLocation::Brillig { .. }) => true, + Some(OpcodeLocation::Acir(acir_index)) => { + matches!(self.get_opcodes()[acir_index], Opcode::BrilligCall { .. }) + } + _ => false, + } + } + pub(super) fn step_acir_opcode(&mut self) -> DebugCommandResult { - if self.currently_executing_brillig() { + if self.is_executing_brillig() { self.step_out_of_brillig_opcode() } else { let status = self.acvm.solve_opcode(); @@ -511,12 +518,6 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } } - pub(super) fn is_executing_brillig(&self) -> bool { - let opcodes = self.get_opcodes(); - let acir_index = self.acvm.instruction_pointer(); - acir_index < opcodes.len() && matches!(opcodes[acir_index], Opcode::Brillig(..)) - } - pub(super) fn get_brillig_memory(&self) -> Option<&[MemoryValue]> { self.brillig_solver.as_ref().map(|solver| solver.get_memory()) } @@ -552,15 +553,17 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { match *location { OpcodeLocation::Acir(acir_index) => acir_index < opcodes.len(), OpcodeLocation::Brillig { acir_index, brillig_index } => { - acir_index < opcodes.len() - && matches!(opcodes[acir_index], Opcode::Brillig(..)) - && { - if let Opcode::Brillig(ref brillig) = opcodes[acir_index] { - brillig_index < brillig.bytecode.len() - } else { - false + if acir_index < opcodes.len() { + match &opcodes[acir_index] { + Opcode::BrilligCall { id, .. 
} => { + let bytecode = &self.unconstrained_functions[*id as usize].bytecode; + brillig_index < bytecode.len() } + _ => false, } + } else { + false + } } } } @@ -649,7 +652,7 @@ mod tests { use acvm::{ acir::{ circuit::{ - brillig::{Brillig, BrilligInputs, BrilligOutputs}, + brillig::{BrilligInputs, BrilligOutputs}, opcodes::BlockId, }, native_types::Expression, @@ -666,12 +669,7 @@ mod tests { let fe_1 = FieldElement::one(); let w_x = Witness(1); - let brillig_opcodes = Brillig { - inputs: vec![BrilligInputs::Single(Expression { - linear_combinations: vec![(fe_1, w_x)], - ..Expression::default() - })], - outputs: vec![], + let brillig_bytecode = BrilligBytecode { bytecode: vec![ BrilligOpcode::CalldataCopy { destination_address: MemoryAddress(0), @@ -692,9 +690,17 @@ mod tests { }, BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, ], - predicate: None, }; - let opcodes = vec![Opcode::Brillig(brillig_opcodes)]; + let opcodes = vec![Opcode::BrilligCall { + id: 0, + inputs: vec![BrilligInputs::Single(Expression { + linear_combinations: vec![(fe_1, w_x)], + ..Expression::default() + })], + outputs: vec![], + predicate: None, + }]; + let brillig_funcs = &vec![brillig_bytecode]; let current_witness_index = 2; let circuit = &Circuit { current_witness_index, opcodes, ..Circuit::default() }; @@ -707,7 +713,6 @@ mod tests { let foreign_call_executor = Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); - let brillig_funcs = &vec![]; let mut context = DebugContext::new( &StubbedBlackBoxSolver, circuit, @@ -766,18 +771,7 @@ mod tests { let w_z = Witness(3); // This Brillig block is equivalent to: z = x + y - let brillig_opcodes = Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - linear_combinations: vec![(fe_1, w_x)], - ..Expression::default() - }), - BrilligInputs::Single(Expression { - linear_combinations: vec![(fe_1, w_y)], - ..Expression::default() - }), - ], - outputs: vec![BrilligOutputs::Simple(w_z)], + let brillig_bytecode = BrilligBytecode { bytecode: vec![ BrilligOpcode::CalldataCopy { destination_address: MemoryAddress(0), @@ -792,11 +786,24 @@ mod tests { }, BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 1 }, ], - predicate: None, }; let opcodes = vec![ // z = x + y - Opcode::Brillig(brillig_opcodes), + Opcode::BrilligCall { + id: 0, + inputs: vec![ + BrilligInputs::Single(Expression { + linear_combinations: vec![(fe_1, w_x)], + ..Expression::default() + }), + BrilligInputs::Single(Expression { + linear_combinations: vec![(fe_1, w_y)], + ..Expression::default() + }), + ], + outputs: vec![BrilligOutputs::Simple(w_z)], + predicate: None, + }, // x + y - z = 0 Opcode::AssertZero(Expression { mul_terms: vec![], @@ -816,7 +823,7 @@ mod tests { let foreign_call_executor = Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); - let brillig_funcs = &vec![]; + let brillig_funcs = &vec![brillig_bytecode]; let mut context = DebugContext::new( &StubbedBlackBoxSolver, circuit, @@ -848,34 +855,24 @@ mod tests { #[test] fn test_offset_opcode_location() { + let brillig_bytecode = BrilligBytecode { + bytecode: vec![ + BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, + BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, + BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, + ], + }; + let opcodes = vec![ - Opcode::Brillig(Brillig { - inputs: vec![], - outputs: vec![], - bytecode: vec![ - BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, 
- BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, - BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, - ], - predicate: None, - }), + Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, Opcode::MemoryInit { block_id: BlockId(0), init: vec![] }, - Opcode::Brillig(Brillig { - inputs: vec![], - outputs: vec![], - bytecode: vec![ - BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, - BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, - BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }, - ], - predicate: None, - }), + Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, Opcode::AssertZero(Expression::default()), ]; let circuit = Circuit { opcodes, ..Circuit::default() }; let debug_artifact = DebugArtifact { debug_symbols: vec![], file_map: BTreeMap::new(), warnings: vec![] }; - let brillig_funcs = &vec![]; + let brillig_funcs = &vec![brillig_bytecode]; let context = DebugContext::new( &StubbedBlackBoxSolver, &circuit, diff --git a/noir/noir-repo/tooling/debugger/src/repl.rs b/noir/noir-repo/tooling/debugger/src/repl.rs index 2a92698e5ce..8f908a38ffc 100644 --- a/noir/noir-repo/tooling/debugger/src/repl.rs +++ b/noir/noir-repo/tooling/debugger/src/repl.rs @@ -3,6 +3,7 @@ use crate::context::{DebugCommandResult, DebugContext}; use acvm::acir::circuit::brillig::BrilligBytecode; use acvm::acir::circuit::{Circuit, Opcode, OpcodeLocation}; use acvm::acir::native_types::{Witness, WitnessMap}; +use acvm::brillig_vm::brillig::Opcode as BrilligOpcode; use acvm::{BlackBoxFunctionSolver, FieldElement}; use crate::foreign_calls::DefaultDebugForeignCallExecutor; @@ -68,23 +69,18 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { Some(location) => { match location { OpcodeLocation::Acir(ip) => { - // Default Brillig display is too bloated for this context, - // so we limit it to denoting it's the start of a Brillig - // block. The user can still use the `opcodes` command to - // take a look at the whole block. - let opcode_summary = match opcodes[ip] { - Opcode::Brillig(..) => "BRILLIG: ...".into(), - _ => format!("{}", opcodes[ip]), - }; - println!("At opcode {}: {}", ip, opcode_summary); + println!("At opcode {}: {}", ip, opcodes[ip]); } OpcodeLocation::Brillig { acir_index, brillig_index } => { - let Opcode::Brillig(ref brillig) = opcodes[acir_index] else { - unreachable!("Brillig location does not contain a Brillig block"); - }; + let brillig_bytecode = + if let Opcode::BrilligCall { id, .. } = opcodes[acir_index] { + &self.unconstrained_functions[id as usize].bytecode + } else { + unreachable!("Brillig location does not contain Brillig opcodes"); + }; println!( "At opcode {}.{}: {:?}", - acir_index, brillig_index, brillig.bytecode[brillig_index] + acir_index, brillig_index, brillig_bytecode[brillig_index] ); } } @@ -104,12 +100,15 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { ) } OpcodeLocation::Brillig { acir_index, brillig_index } => { - let Opcode::Brillig(ref brillig) = opcodes[*acir_index] else { - unreachable!("Brillig location does not contain a Brillig block"); + let brillig_bytecode = if let Opcode::BrilligCall { id, .. 
} = opcodes[*acir_index] + { + &self.unconstrained_functions[id as usize].bytecode + } else { + unreachable!("Brillig location does not contain Brillig opcodes"); }; println!( "Frame #{index}, opcode {}.{}: {:?}", - acir_index, brillig_index, brillig.bytecode[*brillig_index] + acir_index, brillig_index, brillig_bytecode[*brillig_index] ); } } @@ -162,22 +161,30 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { "" } }; + let print_brillig_bytecode = |acir_index, bytecode: &[BrilligOpcode]| { + for (brillig_index, brillig_opcode) in bytecode.iter().enumerate() { + println!( + "{:>3}.{:<2} |{:2} {:?}", + acir_index, + brillig_index, + brillig_marker(acir_index, brillig_index), + brillig_opcode + ); + } + }; for (acir_index, opcode) in opcodes.iter().enumerate() { let marker = outer_marker(acir_index); - if let Opcode::Brillig(brillig) = opcode { - println!("{:>3} {:2} BRILLIG inputs={:?}", acir_index, marker, brillig.inputs); - println!(" | outputs={:?}", brillig.outputs); - for (brillig_index, brillig_opcode) in brillig.bytecode.iter().enumerate() { + match &opcode { + Opcode::BrilligCall { id, inputs, outputs, .. } => { println!( - "{:>3}.{:<2} |{:2} {:?}", - acir_index, - brillig_index, - brillig_marker(acir_index, brillig_index), - brillig_opcode + "{:>3} {:2} BRILLIG CALL id={} inputs={:?}", + acir_index, marker, id, inputs ); + println!(" | outputs={:?}", outputs); + let bytecode = &self.unconstrained_functions[*id as usize].bytecode; + print_brillig_bytecode(acir_index, bytecode); } - } else { - println!("{:>3} {:2} {:?}", acir_index, marker, opcode); + _ => println!("{:>3} {:2} {:?}", acir_index, marker, opcode), } } } diff --git a/noir/noir-repo/tooling/noir_codegen/package.json b/noir/noir-repo/tooling/noir_codegen/package.json index 569841b2c6a..2a90e9374df 100644 --- a/noir/noir-repo/tooling/noir_codegen/package.json +++ b/noir/noir-repo/tooling/noir_codegen/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.27.0", + "version": "0.28.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/noir/noir-repo/tooling/noir_js/package.json b/noir/noir-repo/tooling/noir_js/package.json index 838f317c622..c5bb5af9dfa 100644 --- a/noir/noir-repo/tooling/noir_js/package.json +++ b/noir/noir-repo/tooling/noir_js/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.27.0", + "version": "0.28.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", diff --git a/noir/noir-repo/tooling/noir_js_types/package.json b/noir/noir-repo/tooling/noir_js_types/package.json index 316612a7c51..a356a771b2a 100644 --- a/noir/noir-repo/tooling/noir_js_types/package.json +++ b/noir/noir-repo/tooling/noir_js_types/package.json @@ -4,7 +4,7 @@ "The Noir Team " ], "packageManager": "yarn@3.5.1", - "version": "0.27.0", + "version": "0.28.0", "license": "(MIT OR Apache-2.0)", "homepage": "https://noir-lang.org/", "repository": { diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/package.json b/noir/noir-repo/tooling/noirc_abi_wasm/package.json index 0e4aaceeae3..701c843456a 100644 --- a/noir/noir-repo/tooling/noirc_abi_wasm/package.json +++ b/noir/noir-repo/tooling/noirc_abi_wasm/package.json @@ -3,7 +3,7 @@ "contributors": [ "The Noir Team " ], - "version": "0.27.0", + "version": "0.28.0", "license": "(MIT OR Apache-2.0)", "homepage": "https://noir-lang.org/", "repository": { diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 95c28c7b512..f7a6a3e7d1c 
100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -2857,7 +2857,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/noirc_abi@portal:../noir/packages/noirc_abi::locator=%40aztec%2Faztec3-packages%40workspace%3A." dependencies: - "@noir-lang/types": 0.27.0 + "@noir-lang/types": 0.28.0 languageName: node linkType: soft From e2842a6e02bd42792c8ee8ca03316e5aa4902f5b Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Wed, 24 Apr 2024 14:17:22 -0600 Subject: [PATCH 010/201] chore: add target for individual e2e tests (#6009) Support running individual e2e tests like `earthly ./yarn-project/end-to-end/+e2e-test-single --test=e2e_fees` --- .github/workflows/ci.yml | 57 +++++++++++++++++++++++++------ yarn-project/end-to-end/Earthfile | 5 +++ 2 files changed, 51 insertions(+), 11 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 23c4d074d2f..6dada291720 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,7 +6,7 @@ on: workflow_dispatch: inputs: username: - description: 'Defaults to GitHub Actor' + description: "Defaults to GitHub Actor" required: false runner_action: description: "The action to take with the self-hosted runner (start, stop, restart)." @@ -34,7 +34,10 @@ jobs: outputs: e2e_list: ${{ steps.e2e_list.outputs.list }} steps: - - {uses: actions/checkout@v4, with: { ref: "${{ github.event.pull_request.head.sha }}"}} + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } - uses: ./.github/ci-setup-action with: dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" @@ -47,7 +50,7 @@ jobs: # (Note ARM uses just 2 tests as a smoketest) - name: Create list of end-to-end jobs id: e2e_list - run: echo "list=$(earthly ls ./yarn-project/end-to-end | grep -v '+base' | sed 's/+//' | jq -R . | jq -cs .)" >> $GITHUB_OUTPUT + run: echo "list=$(earthly ls ./yarn-project/end-to-end | grep -v -E '(\+base)|(\+e2e-test-single)' | sed 's/+//' | jq -R . 
| jq -cs .)" >> $GITHUB_OUTPUT # all the end-to-end integration tests for aztec e2e: @@ -58,7 +61,10 @@ jobs: matrix: test: ${{ fromJson( needs.build.outputs.e2e_list )}} steps: - - {uses: actions/checkout@v4, with: { ref: "${{ github.event.pull_request.head.sha }}"}} + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } - uses: ./.github/ci-setup-action with: dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" @@ -78,7 +84,10 @@ jobs: needs: setup runs-on: ${{ inputs.username || github.actor }}-x86 steps: - - {uses: actions/checkout@v4, with: { ref: "${{ github.event.pull_request.head.sha }}"}} + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } # Only allow one memory-hunger prover test to use this runner - uses: ./.github/ci-setup-action with: @@ -95,7 +104,10 @@ jobs: needs: setup runs-on: ${{ github.actor }}-x86 steps: - - {uses: actions/checkout@v4, with: { ref: "${{ github.event.pull_request.head.sha }}"}} + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } # Only allow one memory-hunger prover test to use this runner - uses: ./.github/ci-setup-action with: @@ -109,7 +121,10 @@ jobs: needs: setup runs-on: ${{ github.actor }}-x86 steps: - - {uses: actions/checkout@v4, with: { ref: "${{ github.event.pull_request.head.sha }}"}} + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } # Only allow one memory-hunger prover test to use this runner - uses: ./.github/ci-setup-action with: @@ -124,7 +139,10 @@ jobs: needs: setup runs-on: ${{ inputs.username || github.actor }}-x86 steps: - - {uses: actions/checkout@v4, with: { ref: "${{ github.event.pull_request.head.sha }}"}} + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } - uses: ./.github/ci-setup-action with: dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" @@ -151,7 +169,10 @@ jobs: runs-on: ${{ inputs.username || github.actor }}-bench-x86 needs: setup-bench steps: - - {uses: actions/checkout@v4, with: { ref: "${{ github.event.pull_request.head.sha }}"}} + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } - uses: ./.github/ci-setup-action with: dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" @@ -169,12 +190,26 @@ jobs: merge-check: runs-on: ubuntu-latest - needs: [e2e, bb-native-tests, bb-bench, yarn-project-formatting, yarn-project-test] + needs: + [ + e2e, + bb-native-tests, + bb-bench, + yarn-project-formatting, + yarn-project-test, + ] steps: - run: echo Pull request merging now allowed. 
notify: - needs: [e2e, bb-native-tests, bb-bench, yarn-project-formatting, yarn-project-test] + needs: + [ + e2e, + bb-native-tests, + bb-bench, + yarn-project-formatting, + yarn-project-test, + ] runs-on: ubuntu-latest if: ${{ github.ref == 'refs/heads/master' && failure() }} steps: diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index 59403a2400c..fd46ad8905b 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -86,6 +86,11 @@ e2e-tests: FROM ../+end-to-end RUN yarn test ./src/e2e +e2e-test-single: + ARG test + FROM ../+end-to-end + RUN yarn test $test + flakey-e2e-tests: FROM ../+end-to-end RUN yarn test --passWithNoTests ./src/flakey || true From 6894fc759cc4cd4e77d297fe6164cd39478ece4a Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Wed, 24 Apr 2024 20:00:46 -0300 Subject: [PATCH 011/201] feat: Wire gas from public execution to kernels (#5941) Sets gas used and transaction fee in the public executor and public kernels. Implements changes as defined in https://github.com/AztecProtocol/aztec-packages/pull/5855/ except for those related to enshrining fee payment (eg `fee_payer`). Suggested reviewing per-commit. --- noir-projects/noir-contracts/bootstrap.sh | 3 +- .../noir-contracts/scripts/transpile.sh | 5 ++ .../kernel_circuit_public_inputs_composer.nr | 15 +++- .../src/private_kernel_init.nr | 4 + .../src/private_kernel_tail.nr | 16 +++- .../src/private_kernel_tail_to_public.nr | 16 +++- .../crates/public-kernel-lib/src/common.nr | 16 ++++ .../src/public_kernel_app_logic.nr | 21 +++++ .../src/public_kernel_setup.nr | 21 +++++ .../src/public_kernel_teardown.nr | 70 ++++++++++++++- .../private_accumulated_data_builder.nr | 21 ++++- .../crates/types/src/abis/gas.nr | 16 +++- .../crates/types/src/abis/gas_fees.nr | 4 + ...te_kernel_circuit_public_inputs_builder.nr | 4 + .../crates/types/src/tests/fixture_builder.nr | 8 +- .../src/tests/private_call_data_builder.nr | 21 ++--- .../private_circuit_public_inputs_builder.nr | 6 +- .../aztec-node/src/aztec-node/server.ts | 14 ++- .../contract/contract_function_interaction.ts | 2 +- .../src/interfaces/aztec-node.ts | 4 +- yarn-project/circuit-types/src/mocks.ts | 18 +++- .../circuit-types/src/tx/simulated_tx.test.ts | 2 +- .../circuit-types/src/tx/simulated_tx.ts | 70 +++++++++------ yarn-project/circuits.js/src/structs/gas.ts | 14 ++- .../circuits.js/src/tests/factories.ts | 6 +- .../end-to-end/src/e2e_avm_simulator.test.ts | 16 ++++ .../prover-client/src/mocks/test_context.ts | 21 ++++- .../pxe/src/pxe_service/pxe_service.ts | 4 +- yarn-project/simulator/src/mocks/fixtures.ts | 7 +- .../src/public/abstract_phase_manager.ts | 39 +++++--- .../simulator/src/public/execution.ts | 6 +- yarn-project/simulator/src/public/executor.ts | 30 +++++-- .../simulator/src/public/index.test.ts | 44 ++++----- .../src/public/public_execution_context.ts | 13 ++- .../src/public/public_processor.test.ts | 89 ++++++++++++++++--- .../src/public/teardown_phase_manager.ts | 18 ++++ .../src/public/transitional_adaptors.ts | 16 +++- 37 files changed, 565 insertions(+), 135 deletions(-) create mode 100755 noir-projects/noir-contracts/scripts/transpile.sh diff --git a/noir-projects/noir-contracts/bootstrap.sh b/noir-projects/noir-contracts/bootstrap.sh index d843e9cdc7d..b5bf4764cb2 100755 --- a/noir-projects/noir-contracts/bootstrap.sh +++ b/noir-projects/noir-contracts/bootstrap.sh @@ -20,5 +20,4 @@ NARGO=${NARGO:-../../noir/noir-repo/target/release/nargo} $NARGO compile --silence-warnings 
echo "Transpiling avm contracts... (only '#[aztec(public-vm)]')" -TRANSPILER=${TRANSPILER:-../../avm-transpiler/target/release/avm-transpiler} -ls target/avm_*.json | parallel "$TRANSPILER {} {}" \ No newline at end of file +scripts/transpile.sh \ No newline at end of file diff --git a/noir-projects/noir-contracts/scripts/transpile.sh b/noir-projects/noir-contracts/scripts/transpile.sh new file mode 100755 index 00000000000..9bea61f5ffa --- /dev/null +++ b/noir-projects/noir-contracts/scripts/transpile.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash +set -eu + +TRANSPILER=${TRANSPILER:-../../avm-transpiler/target/release/avm-transpiler} +ls target/avm_*.json | parallel "$TRANSPILER {} {}" \ No newline at end of file diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr index 84157b9b95b..689393b9bbf 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/kernel_circuit_public_inputs_composer.nr @@ -22,6 +22,9 @@ fn asc_sort_by_counters(a: T, b: T) -> bool where T: Ordered { a.counter() < b.counter() } +// Builds: +// .finish -> KernelCircuitPublicInputs (from PrivateKernelTailCircuitPrivateInputs) +// .finish_to_public -> PublicKernelCircuitPublicInputs (from PrivateKernelTailToPublicCircuitPrivateInputs) struct KernelCircuitPublicInputsComposer { public_inputs: PrivateKernelCircuitPublicInputsBuilder, previous_kernel: PrivateKernelData, @@ -47,7 +50,7 @@ impl KernelCircuitPublicInputsComposer { sorted_encrypted_log_hashes: [SideEffect; MAX_ENCRYPTED_LOGS_PER_TX], sorted_encrypted_log_hashes_indexes: [u64; MAX_ENCRYPTED_LOGS_PER_TX], sorted_unencrypted_log_hashes: [SideEffect; MAX_UNENCRYPTED_LOGS_PER_TX], - sorted_unencrypted_log_hashes_indexes: [u64; MAX_UNENCRYPTED_LOGS_PER_TX], + sorted_unencrypted_log_hashes_indexes: [u64; MAX_UNENCRYPTED_LOGS_PER_TX] ) -> Self { let public_inputs = PrivateKernelCircuitPublicInputsBuilder::empty(); @@ -62,7 +65,7 @@ impl KernelCircuitPublicInputsComposer { sorted_encrypted_log_hashes, sorted_encrypted_log_hashes_indexes, sorted_unencrypted_log_hashes, - sorted_unencrypted_log_hashes_indexes, + sorted_unencrypted_log_hashes_indexes } } @@ -82,6 +85,8 @@ impl KernelCircuitPublicInputsComposer { self.silo_values(); + self.set_gas_used(); + *self } @@ -102,6 +107,12 @@ impl KernelCircuitPublicInputsComposer { self.public_inputs.finish_to_public(min_revertible_side_effect_counter) } + fn set_gas_used(&mut self) { + // TODO(gas): Compute DA gas used here and add to public_inputs.(end,end_non_revertible).gas_used + let teardown_gas = self.previous_kernel.public_inputs.constants.tx_context.gas_settings.teardown_gas_limits; + self.public_inputs.end.gas_used = teardown_gas; + } + fn silo_values(&mut self) { self.silo_note_hashes(); // TODO: Move siloing from init/inner circuits to here. 
diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr index 64bc2e703dd..b348adc4ad0 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_init.nr @@ -50,6 +50,10 @@ impl PrivateKernelInitCircuitPrivateInputs { // Ensure we are passing the correct arguments to the function. let args_match = tx_request.args_hash == call_stack_item.public_inputs.args_hash; assert(args_match, "noir function args passed to tx_request must match args in the call_stack_item"); + // + // Ensure we are passing the correct tx context + let tx_context_matches = tx_request.tx_context == call_stack_item.public_inputs.tx_context; + assert(tx_context_matches, "tx_context in tx_request must match tx_context in call_stack_item"); } fn validate_inputs(self) { diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr index 5d360bb0024..730d2979dee 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail.nr @@ -57,7 +57,7 @@ impl PrivateKernelTailCircuitPrivateInputs { self.sorted_encrypted_log_hashes, self.sorted_encrypted_log_hashes_indexes, self.sorted_unencrypted_log_hashes, - self.sorted_unencrypted_log_hashes_indexes, + self.sorted_unencrypted_log_hashes_indexes ); composer.compose().finish() } @@ -76,7 +76,7 @@ mod tests { use dep::types::{ abis::{ kernel_circuit_public_inputs::KernelCircuitPublicInputs, max_block_number::MaxBlockNumber, - side_effect::{SideEffect, SideEffectLinkedToNoteHash, Ordered} + side_effect::{SideEffect, SideEffectLinkedToNoteHash, Ordered}, gas::Gas }, grumpkin_private_key::GrumpkinPrivateKey, hash::{compute_note_hash_nonce, compute_unique_siloed_note_hash, accumulate_sha256}, @@ -189,7 +189,7 @@ mod tests { sorted_encrypted_log_hashes_indexes, sorted_unencrypted_log_hashes, sorted_unencrypted_log_hashes_indexes, - master_nullifier_secret_keys: [GrumpkinPrivateKey::empty(); MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], + master_nullifier_secret_keys: [GrumpkinPrivateKey::empty(); MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX] }; kernel.native_private_kernel_circuit_tail() } @@ -488,4 +488,14 @@ mod tests { builder.previous_kernel.new_nullifiers = BoundedVec::new(); builder.failed(); } + + #[test] + unconstrained fn set_teardown_gas_as_gas_used() { + // TODO(gas): When we compute DA gas used, we'll have to include it here as well. 
+ let mut builder = PrivateKernelTailInputsBuilder::new(); + builder.previous_kernel.tx_context.gas_settings.teardown_gas_limits = Gas::new(300, 300, 300); + let public_inputs = builder.execute(); + + assert_eq(public_inputs.end.gas_used, Gas::new(300, 300, 300)); + } } diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr index 081219df963..bdef6ba568d 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr +++ b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr @@ -57,7 +57,7 @@ impl PrivateKernelTailToPublicCircuitPrivateInputs { self.sorted_encrypted_log_hashes, self.sorted_encrypted_log_hashes_indexes, self.sorted_unencrypted_log_hashes, - self.sorted_unencrypted_log_hashes_indexes, + self.sorted_unencrypted_log_hashes_indexes ); composer.compose_public().finish_to_public() } @@ -75,7 +75,7 @@ mod tests { }; use dep::types::{ abis::{ - kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, + kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, gas::Gas, side_effect::{SideEffect, SideEffectLinkedToNoteHash, Ordered} }, grumpkin_private_key::GrumpkinPrivateKey, @@ -196,7 +196,7 @@ mod tests { sorted_encrypted_log_hashes_indexes, sorted_unencrypted_log_hashes, sorted_unencrypted_log_hashes_indexes, - master_nullifier_secret_keys: [GrumpkinPrivateKey::empty(); MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX], + master_nullifier_secret_keys: [GrumpkinPrivateKey::empty(); MAX_NULLIFIER_KEY_VALIDATION_REQUESTS_PER_TX] }; kernel.execute() } @@ -538,4 +538,14 @@ mod tests { assert(is_empty_array(public_inputs.end.new_note_hashes)); assert(is_empty_array(public_inputs.end.new_nullifiers)); } + + #[test] + unconstrained fn set_teardown_gas_as_gas_used() { + // TODO(gas): When we compute DA gas used, we'll have to include it here as well. 
+ let mut builder = PrivateKernelTailToPublicInputsBuilder::new(); + builder.previous_kernel.tx_context.gas_settings.teardown_gas_limits = Gas::new(300, 300, 300); + let public_inputs = builder.execute(); + + assert_eq(public_inputs.end.gas_used, Gas::new(300, 300, 300)); + } } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr index 7cff5c51619..855da0a6ec4 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/common.nr @@ -229,6 +229,22 @@ pub fn update_non_revertible_gas_used(public_call: PublicCallData, circuit_outpu .sub(accum_end_gas_used); } +// Validates that the start gas injected into the app circuit matches the remaining gas +pub fn validate_start_gas(public_call: PublicCallData, previous_kernel: PublicKernelData) { + let public_call_start_gas = public_call.call_stack_item.public_inputs.start_gas_left; + let tx_gas_limits = previous_kernel.public_inputs.constants.tx_context.gas_settings.gas_limits; + let computed_start_gas = tx_gas_limits.sub(previous_kernel.public_inputs.end.gas_used).sub(previous_kernel.public_inputs.end_non_revertible.gas_used); + assert( + public_call_start_gas == computed_start_gas, "Start gas for public phase does not match transaction gas left" + ); +} + +// Validates the transaction fee injected into the app circuit is zero for non-teardown phases +pub fn validate_transaction_fee_is_zero(public_call: PublicCallData) { + let transaction_fee = public_call.call_stack_item.public_inputs.transaction_fee; + assert(transaction_fee == 0, "Transaction fee must be zero on setup and app phases"); +} + pub fn update_public_end_non_revertible_values( public_call: PublicCallData, circuit_outputs: &mut PublicKernelCircuitPublicInputsBuilder diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr index f722f087618..e372e50f43d 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_app_logic.nr @@ -33,6 +33,9 @@ impl PublicKernelAppLogicCircuitPrivateInputs { // validate the inputs unique to having a previous public kernel self.validate_inputs(); + common::validate_start_gas(self.public_call, self.previous_kernel); + common::validate_transaction_fee_is_zero(self.public_call); + common::update_validation_requests(self.public_call, &mut public_inputs); common::update_revertible_gas_used(self.public_call, &mut public_inputs); @@ -464,4 +467,22 @@ mod tests { assert_eq(output.end.gas_used, Gas::new(500, 500, 500)); assert_eq(output.end_non_revertible.gas_used, Gas::new(0, 0, 0)); } + + #[test(should_fail_with="Start gas for public phase does not match transaction gas left")] + fn validates_start_gas() { + let mut builder = PublicKernelAppLogicCircuitPrivateInputsBuilder::new(); + + builder.public_call.public_inputs.start_gas_left = Gas::new(200, 100, 100); + + builder.failed(); + } + + #[test(should_fail_with="Transaction fee must be zero on setup and app phases")] + fn validates_transaction_fee() { + let mut builder = PublicKernelAppLogicCircuitPrivateInputsBuilder::new(); + + builder.public_call.public_inputs.transaction_fee = 10; + + builder.failed(); + } } diff --git 
a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr index 9b0f1764a6a..e918d78ad64 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_setup.nr @@ -39,6 +39,9 @@ impl PublicKernelSetupCircuitPrivateInputs { // validate the inputs unique to having a previous private kernel self.validate_inputs(); + common::validate_start_gas(self.public_call, self.previous_kernel); + common::validate_transaction_fee_is_zero(self.public_call); + common::update_non_revertible_gas_used(self.public_call, &mut public_inputs); // Pops the item from the call stack and validates it against the current execution. @@ -519,4 +522,22 @@ mod tests { assert_eq(output.end_non_revertible.gas_used, Gas::new(500, 500, 500)); assert_eq(output.end.gas_used, Gas::new(100, 100, 100)); } + + #[test(should_fail_with="Start gas for public phase does not match transaction gas left")] + fn validates_start_gas() { + let mut builder = PublicKernelSetupCircuitPrivateInputsBuilder::new(); + + builder.public_call.public_inputs.start_gas_left = Gas::new(200, 100, 100); + + builder.failed(); + } + + #[test(should_fail_with="Transaction fee must be zero on setup and app phases")] + fn validates_transaction_fee() { + let mut builder = PublicKernelSetupCircuitPrivateInputsBuilder::new(); + + builder.public_call.public_inputs.transaction_fee = 10; + + builder.failed(); + } } diff --git a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_teardown.nr b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_teardown.nr index 67b89c28167..9b19bb71dcc 100644 --- a/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_teardown.nr +++ b/noir-projects/noir-protocol-circuits/crates/public-kernel-lib/src/public_kernel_teardown.nr @@ -1,7 +1,7 @@ use crate::common; use dep::types::abis::{ kernel_circuit_public_inputs::{PublicKernelCircuitPublicInputs, PublicKernelCircuitPublicInputsBuilder}, - kernel_data::PublicKernelData, public_call_data::PublicCallData + kernel_data::PublicKernelData, public_call_data::PublicCallData, gas_fees::GasFees }; struct PublicKernelTeardownCircuitPrivateInputs { @@ -22,6 +22,45 @@ impl PublicKernelTeardownCircuitPrivateInputs { assert(needs_teardown == true, "Cannot run unnecessary teardown circuit"); } + // Validates that the start gas injected into the app circuit matches the teardown gas limits set by the user + fn validate_start_gas(self) { + let public_call_start_gas = self.public_call.call_stack_item.public_inputs.start_gas_left; + let teardown_gas_limit = self.previous_kernel.public_inputs.constants.tx_context.gas_settings.teardown_gas_limits; + assert( + public_call_start_gas == teardown_gas_limit, "Start gas for teardown phase does not match teardown gas allocation" + ); + } + + // Validates the transaction fee injected into the app circuit is properly computed from gas_used and block gas_fees + fn validate_transaction_fee(self) { + let transaction_fee = self.public_call.call_stack_item.public_inputs.transaction_fee; + // Note that teardown_gas is already included in end.gas_used as it was injected by the private kernel + let total_gas_used = self.previous_kernel.public_inputs.end.gas_used.add(self.previous_kernel.public_inputs.end_non_revertible.gas_used); + // 
TODO(palla/gas): Load gas fees from a PublicConstantData struct that's currently missing + let block_gas_fees = GasFees::default(); + let inclusion_fee = self.previous_kernel.public_inputs.constants.tx_context.gas_settings.inclusion_fee; + let computed_transaction_fee = total_gas_used.compute_fee(block_gas_fees) + inclusion_fee; + + // dep::types::debug_log::debug_log_format( + // "Validating tx fee: total_gas_used.da={0} total_gas_used.l1={1} total_gas_used.l2={2} block_fee_per_gas.da={3} block_fee_per_gas.l1={4} block_fee_per_gas.l2={5} inclusion_fee={6} computed={7} actual={8}", + // [ + // total_gas_used.da_gas as Field, + // total_gas_used.l1_gas as Field, + // total_gas_used.l2_gas as Field, + // block_gas_fees.fee_per_da_gas as Field, + // block_gas_fees.fee_per_l1_gas as Field, + // block_gas_fees.fee_per_l2_gas as Field, + // inclusion_fee, + // computed_transaction_fee, + // transaction_fee + // ] + // ); + + assert( + transaction_fee == computed_transaction_fee, "Transaction fee on teardown phase does not match expected value" + ); + } + fn public_kernel_teardown(self) -> PublicKernelCircuitPublicInputs { // construct the circuit outputs let mut public_inputs = PublicKernelCircuitPublicInputsBuilder::empty(); @@ -42,6 +81,9 @@ impl PublicKernelTeardownCircuitPrivateInputs { let call_request = public_inputs.end_non_revertible.public_call_stack.pop(); common::validate_call_against_request(self.public_call, call_request); + self.validate_start_gas(); + self.validate_transaction_fee(); + common::update_validation_requests(self.public_call, &mut public_inputs); common::update_public_end_non_revertible_values(self.public_call, &mut public_inputs); @@ -60,7 +102,7 @@ mod tests { }; use dep::types::{ abis::{ - call_request::CallRequest, function_selector::FunctionSelector, + call_request::CallRequest, function_selector::FunctionSelector, gas::Gas, kernel_circuit_public_inputs::PublicKernelCircuitPublicInputs, public_data_read::PublicDataRead, public_data_update_request::PublicDataUpdateRequest }, @@ -347,12 +389,16 @@ mod tests { let public_inputs = builder.execute(); - assert_eq(public_inputs.end_non_revertible.encrypted_log_preimages_length, prev_encrypted_log_preimages_length); + assert_eq( + public_inputs.end_non_revertible.encrypted_log_preimages_length, prev_encrypted_log_preimages_length + ); assert_eq( public_inputs.end_non_revertible.unencrypted_log_preimages_length, unencrypted_log_preimages_length + prev_unencrypted_log_preimages_length ); assert_eq(public_inputs.end_non_revertible.encrypted_logs_hashes[0].value, prev_encrypted_logs_hash); - assert_eq(public_inputs.end_non_revertible.unencrypted_logs_hashes[0].value, prev_unencrypted_logs_hash); + assert_eq( + public_inputs.end_non_revertible.unencrypted_logs_hashes[0].value, prev_unencrypted_logs_hash + ); assert_eq(public_inputs.end_non_revertible.unencrypted_logs_hashes[1].value, unencrypted_logs_hash); } @@ -363,4 +409,20 @@ mod tests { builder.failed(); } + + #[test(should_fail_with="Start gas for teardown phase does not match teardown gas allocation")] + fn validates_start_gas() { + let mut builder = PublicKernelTeardownCircuitPrivateInputsBuilder::new(); + builder.public_call.public_inputs.start_gas_left = Gas::new(10, 20, 30); + + builder.failed(); + } + + #[test(should_fail_with="Transaction fee on teardown phase does not match expected value")] + fn validates_transaction_fee() { + let mut builder = PublicKernelTeardownCircuitPrivateInputsBuilder::new(); + builder.public_call.public_inputs.transaction_fee = 1234; + + 
builder.failed(); + } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr index bc735cc1558..70d79d5180d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/accumulated_data/private_accumulated_data_builder.nr @@ -18,6 +18,10 @@ use crate::{ traits::Empty }; +// Builds via PrivateKernelCircuitPublicInputsBuilder: +// .finish: PrivateKernelCircuitPublicInputs.end +// .to_combined: KernelCircuitPublicInputs.end +// .split_to_public: PublicKernelCircuitPublicInputs.(end,end_non_revertible) struct PrivateAccumulatedDataBuilder { new_note_hashes: BoundedVec, new_nullifiers: BoundedVec, @@ -35,10 +39,14 @@ struct PrivateAccumulatedDataBuilder { public_call_stack: BoundedVec, gas_used: Gas, + non_revertible_gas_used: Gas, } impl PrivateAccumulatedDataBuilder { pub fn finish(self) -> PrivateAccumulatedData { + assert(self.gas_used.is_empty()); + assert(self.non_revertible_gas_used.is_empty()); + PrivateAccumulatedData { new_note_hashes: self.new_note_hashes.storage, new_nullifiers: self.new_nullifiers.storage, @@ -66,7 +74,7 @@ impl PrivateAccumulatedDataBuilder { encrypted_log_preimages_length: self.encrypted_log_preimages_length, unencrypted_log_preimages_length: self.unencrypted_log_preimages_length, public_data_update_requests: [PublicDataUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], - gas_used: self.gas_used + gas_used: self.gas_used.add(self.non_revertible_gas_used) } } @@ -127,6 +135,8 @@ impl PrivateAccumulatedDataBuilder { revertible_builder.encrypted_log_preimages_length = self.encrypted_log_preimages_length; revertible_builder.unencrypted_log_preimages_length = self.unencrypted_log_preimages_length; + revertible_builder.gas_used = self.gas_used; + non_revertible_builder.gas_used = self.non_revertible_gas_used; (non_revertible_builder.finish(), revertible_builder.finish()) } } @@ -134,7 +144,7 @@ impl PrivateAccumulatedDataBuilder { mod tests { use crate::{ abis::{ - accumulated_data::private_accumulated_data_builder::PrivateAccumulatedDataBuilder, + accumulated_data::private_accumulated_data_builder::PrivateAccumulatedDataBuilder, gas::Gas, call_request::CallRequest, caller_context::CallerContext, public_data_update_request::PublicDataUpdateRequest, side_effect::{SideEffect, SideEffectLinkedToNoteHash} @@ -202,6 +212,9 @@ mod tests { builder.public_call_stack.extend_from_array(non_revertible_public_stack); builder.public_call_stack.extend_from_array(revertible_public_call_stack); + builder.gas_used = Gas::new(20,20,20); + builder.non_revertible_gas_used = Gas::new(10,10,10); + let (non_revertible, revertible) = builder.split_to_public(7); assert(array_eq(non_revertible.new_note_hashes, non_revertible_commitments)); @@ -211,6 +224,9 @@ mod tests { assert(array_eq(revertible.new_note_hashes, revertible_commitments)); assert(array_eq(revertible.new_nullifiers, revertible_nullifiers)); assert(array_eq(revertible.public_call_stack, revertible_public_call_stack)); + + assert_eq(revertible.gas_used, Gas::new(20, 20, 20)); + assert_eq(non_revertible.gas_used, Gas::new(10, 10, 10)); } } @@ -227,6 +243,7 @@ impl Empty for PrivateAccumulatedDataBuilder { private_call_stack: BoundedVec::new(), public_call_stack: BoundedVec::new(), gas_used: Gas::empty(), + 
non_revertible_gas_used: Gas::empty(), } } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/gas.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/gas.nr index d4c6297b98b..7a738d0e27c 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/gas.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/gas.nr @@ -1,7 +1,7 @@ use crate::{ abis::function_selector::FunctionSelector, address::{EthAddress, AztecAddress}, constants::GAS_LENGTH, hash::pedersen_hash, traits::{Deserialize, Hash, Serialize, Empty}, - abis::side_effect::Ordered, utils::reader::Reader + abis::side_effect::Ordered, utils::reader::Reader, abis::gas_fees::GasFees }; struct Gas { @@ -15,7 +15,7 @@ impl Gas { Self { da_gas, l1_gas, l2_gas } } - fn add(self, other: Gas) -> Self { + pub fn add(self, other: Gas) -> Self { Gas::new( self.da_gas + other.da_gas, self.l1_gas + other.l1_gas, @@ -23,13 +23,23 @@ impl Gas { ) } - fn sub(self, other: Gas) -> Self { + pub fn sub(self, other: Gas) -> Self { Gas::new( self.da_gas - other.da_gas, self.l1_gas - other.l1_gas, self.l2_gas - other.l2_gas ) } + + pub fn compute_fee(self, fees: GasFees) -> Field { + (self.da_gas as Field) * fees.fee_per_da_gas + + (self.l1_gas as Field) * fees.fee_per_l1_gas + + (self.l2_gas as Field) * fees.fee_per_l2_gas + } + + pub fn is_empty(self) -> bool { + (self.da_gas == 0) & (self.l1_gas == 0) & (self.l2_gas == 0) + } } impl Serialize for Gas { diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/gas_fees.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/gas_fees.nr index 5e9fa42c556..09d75aae0c8 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/gas_fees.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/gas_fees.nr @@ -14,6 +14,10 @@ impl GasFees { pub fn new(fee_per_da_gas: Field, fee_per_l1_gas: Field, fee_per_l2_gas: Field) -> Self { Self { fee_per_da_gas, fee_per_l1_gas, fee_per_l2_gas } } + + pub fn default() -> Self { + GasFees::new(1, 1, 1) + } } impl Serialize for GasFees { diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr index b4fc5217621..301dce0667d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/abis/kernel_circuit_public_inputs/private_kernel_circuit_public_inputs_builder.nr @@ -11,6 +11,10 @@ use crate::{ mocked::AggregationObject, partial_state_reference::PartialStateReference, traits::Empty }; +// Builds: +// .finish: PrivateKernelCircuitPublicInputs +// .finish_tail: KernelCircuitPublicInputs (from KernelCircuitPublicInputsComposer) +// .finish_to_public: PublicKernelCircuitPublicInputs (from KernelCircuitPublicInputsComposer) struct PrivateKernelCircuitPublicInputsBuilder { aggregation_object: AggregationObject, min_revertible_side_effect_counter: u32, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr index 8a402fae806..eb50167d527 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr +++ 
b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr @@ -50,6 +50,7 @@ struct FixtureBuilder { private_call_stack: BoundedVec, public_call_stack: BoundedVec, gas_used: Gas, + non_revertible_gas_used: Gas, // Validation requests. max_block_number: MaxBlockNumber, @@ -109,7 +110,8 @@ impl FixtureBuilder { min_revertible_side_effect_counter: 0, counter: 0, start_state: PartialStateReference::empty(), - gas_used: Gas::empty() + gas_used: Gas::empty(), + non_revertible_gas_used: Gas::empty() } } @@ -128,7 +130,8 @@ impl FixtureBuilder { unencrypted_log_preimages_length: self.unencrypted_log_preimages_length, private_call_stack: self.private_call_stack, public_call_stack: self.public_call_stack, - gas_used: self.gas_used + gas_used: self.gas_used, + non_revertible_gas_used: self.non_revertible_gas_used }; public_inputs.finish() } @@ -451,6 +454,7 @@ impl Empty for FixtureBuilder { counter: 0, start_state: PartialStateReference::empty(), gas_used: Gas::empty(), + non_revertible_gas_used: Gas::empty(), } } } diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr index c9844fd07ab..7bcb80dca85 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_call_data_builder.nr @@ -89,7 +89,7 @@ impl PrivateCallDataBuilder { } pub fn build_tx_context(self) -> TxContext { - TxContext { chain_id: 1, version: 0, gas_settings: self.gas_settings } + self.public_inputs.build_tx_context() } pub fn append_private_call_requests(&mut self, num_requests: u64, is_delegate_call: bool) { @@ -170,24 +170,21 @@ impl PrivateCallDataBuilder { self.public_inputs.unencrypted_log_preimages_length += preimages_length; } - pub fn get_call_stack_item_hash(self) -> Field { - let call_stack_item = PrivateCallStackItem { + fn build_call_stack_item(self) -> PrivateCallStackItem { + PrivateCallStackItem { contract_address: self.contract_address, function_data: self.function_data, public_inputs: self.public_inputs.finish() - }; - call_stack_item.hash() + } } - pub fn finish(self) -> PrivateCallData { - let call_stack_item = PrivateCallStackItem { - contract_address: self.contract_address, - function_data: self.function_data, - public_inputs: self.public_inputs.finish() - }; + pub fn get_call_stack_item_hash(self) -> Field { + self.build_call_stack_item().hash() + } + pub fn finish(self) -> PrivateCallData { PrivateCallData { - call_stack_item, + call_stack_item: self.build_call_stack_item(), private_call_stack: self.private_call_stack.storage, public_call_stack: self.public_call_stack.storage, proof: self.proof, diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr index f29d76fceab..f3de910442d 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/private_circuit_public_inputs_builder.nr @@ -85,6 +85,10 @@ impl PrivateCircuitPublicInputsBuilder { public_inputs } + pub fn build_tx_context(self) -> TxContext { + TxContext::new(self.chain_id, self.version, self.gas_settings) + } + pub fn finish(self) -> PrivateCircuitPublicInputs { PrivateCircuitPublicInputs { call_context: 
self.call_context, @@ -107,7 +111,7 @@ impl PrivateCircuitPublicInputsBuilder { encrypted_log_preimages_length: self.encrypted_log_preimages_length, unencrypted_log_preimages_length: self.unencrypted_log_preimages_length, historical_header: self.historical_header, - tx_context: TxContext::new(self.chain_id, self.version, self.gas_settings) + tx_context: self.build_tx_context() } } } diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index d0bfaa8ff32..5a57d935a31 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -13,6 +13,7 @@ import { LogType, MerkleTreeId, NullifierMembershipWitness, + type ProcessOutput, type ProverClient, PublicDataWitness, type SequencerConfig, @@ -634,7 +635,7 @@ export class AztecNodeService implements AztecNode { * Simulates the public part of a transaction with the current state. * @param tx - The transaction to simulate. **/ - public async simulatePublicCalls(tx: Tx) { + public async simulatePublicCalls(tx: Tx): Promise { this.log.info(`Simulating tx ${tx.getTxHash()}`); const blockNumber = (await this.blockSource.getBlockNumber()) + 1; @@ -660,6 +661,7 @@ export class AztecNodeService implements AztecNode { new WASMSimulator(), ); const processor = await publicProcessorFactory.create(prevHeader, newGlobalVariables); + // REFACTOR: Consider merging ProcessReturnValues into ProcessedTx const [processedTxs, failedTxs, returns] = await processor.process([tx]); if (failedTxs.length) { this.log.warn(`Simulated tx ${tx.getTxHash()} fails: ${failedTxs[0].error}`); @@ -671,7 +673,15 @@ export class AztecNodeService implements AztecNode { throw reverted[0].revertReason; } this.log.info(`Simulated tx ${tx.getTxHash()} succeeds`); - return returns[0]; + const [processedTx] = processedTxs; + return { + constants: processedTx.data.constants, + encryptedLogs: processedTx.encryptedLogs, + unencryptedLogs: processedTx.unencryptedLogs, + end: processedTx.data.end, + revertReason: processedTx.revertReason, + publicReturnValues: returns[0], + }; } public setConfig(config: Partial): Promise { diff --git a/yarn-project/aztec.js/src/contract/contract_function_interaction.ts b/yarn-project/aztec.js/src/contract/contract_function_interaction.ts index 06a8e67abc9..6d34219e5e6 100644 --- a/yarn-project/aztec.js/src/contract/contract_function_interaction.ts +++ b/yarn-project/aztec.js/src/contract/contract_function_interaction.ts @@ -105,7 +105,7 @@ export class ContractFunctionInteraction extends BaseContractInteraction { const txRequest = await this.create(); const simulatedTx = await this.pxe.simulateTx(txRequest, true); this.txRequest = undefined; - const flattened = simulatedTx.publicReturnValues; + const flattened = simulatedTx.publicOutput?.publicReturnValues; return flattened ? 
decodeReturnValues(this.functionDao, flattened) : []; } } diff --git a/yarn-project/circuit-types/src/interfaces/aztec-node.ts b/yarn-project/circuit-types/src/interfaces/aztec-node.ts index 03e9ddbff8a..95cc81d5bc2 100644 --- a/yarn-project/circuit-types/src/interfaces/aztec-node.ts +++ b/yarn-project/circuit-types/src/interfaces/aztec-node.ts @@ -21,7 +21,7 @@ import { } from '../logs/index.js'; import { type MerkleTreeId } from '../merkle_tree_id.js'; import { type SiblingPath } from '../sibling_path/index.js'; -import { type ProcessReturnValues, type Tx, type TxHash, type TxReceipt } from '../tx/index.js'; +import { type ProcessOutput, type Tx, type TxHash, type TxReceipt } from '../tx/index.js'; import { type TxEffect } from '../tx_effect.js'; import { type SequencerConfig } from './configs.js'; import { type L2BlockNumber } from './l2_block_number.js'; @@ -282,7 +282,7 @@ export interface AztecNode { * This currently just checks that the transaction execution succeeds. * @param tx - The transaction to simulate. **/ - simulatePublicCalls(tx: Tx): Promise; + simulatePublicCalls(tx: Tx): Promise; /** * Updates the configuration of this node. diff --git a/yarn-project/circuit-types/src/mocks.ts b/yarn-project/circuit-types/src/mocks.ts index 9cc201d2feb..3d5c1092bd2 100644 --- a/yarn-project/circuit-types/src/mocks.ts +++ b/yarn-project/circuit-types/src/mocks.ts @@ -12,7 +12,11 @@ import { computeContractClassId, getContractClassFromArtifact, } from '@aztec/circuits.js'; -import { makePublicCallRequest } from '@aztec/circuits.js/testing'; +import { + makeCombinedAccumulatedData, + makeCombinedConstantData, + makePublicCallRequest, +} from '@aztec/circuits.js/testing'; import { type ContractArtifact } from '@aztec/foundation/abi'; import { makeTuple } from '@aztec/foundation/array'; import { times } from '@aztec/foundation/collection'; @@ -23,7 +27,7 @@ import { type ContractInstanceWithAddress, SerializableContractInstance } from ' import { EncryptedL2Log } from './logs/encrypted_l2_log.js'; import { EncryptedFunctionL2Logs, EncryptedTxL2Logs, Note, UnencryptedTxL2Logs } from './logs/index.js'; import { ExtendedNote } from './notes/index.js'; -import { type ProcessReturnValues, SimulatedTx, Tx, TxHash } from './tx/index.js'; +import { type ProcessOutput, type ProcessReturnValues, SimulatedTx, Tx, TxHash } from './tx/index.js'; /** * Testing utility to create empty logs composed from a single empty log. 
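// --- Editor's note: illustrative sketch, not part of this patch. It shows how a
// caller might consume the new ProcessOutput returned by the AztecNode
// simulatePublicCalls change above. The `node` and `tx` bindings, and the imports
// of AztecNode/Tx from '@aztec/circuit-types', are assumptions for illustration.
async function logPublicSimulation(node: AztecNode, tx: Tx): Promise<void> {
  const output = await node.simulatePublicCalls(tx);
  if (output.revertReason) {
    // The public phase reverted; surface the reason to the caller.
    throw output.revertReason;
  }
  // Gas accounting and public return values now travel alongside the logs.
  console.log(`L2 gas used: ${output.end.gasUsed.l2Gas}`);
  console.log(`Return values: ${output.publicReturnValues?.map(fr => fr.toString()).join(', ')}`);
}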
@@ -114,7 +118,15 @@ export const mockTxForRollup = (seed = 1, { hasLogs = false }: { hasLogs?: boole export const mockSimulatedTx = (seed = 1, hasLogs = true) => { const tx = mockTx(seed, { hasLogs }); const dec: ProcessReturnValues = [new Fr(1n), new Fr(2n), new Fr(3n), new Fr(4n)]; - return new SimulatedTx(tx, dec, dec); + const output: ProcessOutput = { + constants: makeCombinedConstantData(), + encryptedLogs: tx.encryptedLogs, + unencryptedLogs: tx.unencryptedLogs, + end: makeCombinedAccumulatedData(), + revertReason: undefined, + publicReturnValues: dec, + }; + return new SimulatedTx(tx, dec, output); }; export const randomContractArtifact = (): ContractArtifact => ({ diff --git a/yarn-project/circuit-types/src/tx/simulated_tx.test.ts b/yarn-project/circuit-types/src/tx/simulated_tx.test.ts index 167c91259fa..8dd9ccc5c25 100644 --- a/yarn-project/circuit-types/src/tx/simulated_tx.test.ts +++ b/yarn-project/circuit-types/src/tx/simulated_tx.test.ts @@ -10,7 +10,7 @@ describe('simulated_tx', () => { it('convert undefined effects to and from json', () => { const simulatedTx = mockSimulatedTx(); simulatedTx.privateReturnValues = undefined; - simulatedTx.publicReturnValues = undefined; + simulatedTx.publicOutput = undefined; expect(SimulatedTx.fromJSON(simulatedTx.toJSON())).toEqual(simulatedTx); }); }); diff --git a/yarn-project/circuit-types/src/tx/simulated_tx.ts b/yarn-project/circuit-types/src/tx/simulated_tx.ts index baef7a2d4ea..bf012e0f901 100644 --- a/yarn-project/circuit-types/src/tx/simulated_tx.ts +++ b/yarn-project/circuit-types/src/tx/simulated_tx.ts @@ -1,32 +1,59 @@ -import { Fr } from '@aztec/circuits.js'; +import { CombinedAccumulatedData, CombinedConstantData, Fr } from '@aztec/circuits.js'; +import { EncryptedTxL2Logs, UnencryptedTxL2Logs } from '../logs/index.js'; +import { type ProcessedTx } from './processed_tx.js'; import { Tx } from './tx.js'; +/** Return values of simulating a circuit. */ export type ProcessReturnValues = Fr[] | undefined; +/** + * Outputs of processing the public component of a transaction. + * REFACTOR: Rename. + */ +export type ProcessOutput = Pick & + Pick & { publicReturnValues: ProcessReturnValues }; + +function processOutputToJSON(output: ProcessOutput) { + return { + encryptedLogs: output.encryptedLogs.toJSON(), + unencryptedLogs: output.unencryptedLogs.toJSON(), + revertReason: output.revertReason, + constants: output.constants.toBuffer().toString('hex'), + end: output.end.toBuffer().toString('hex'), + publicReturnValues: output.publicReturnValues?.map(fr => fr.toString()), + }; +} + +function processOutputFromJSON(json: any): ProcessOutput { + return { + encryptedLogs: EncryptedTxL2Logs.fromJSON(json.encryptedLogs), + unencryptedLogs: UnencryptedTxL2Logs.fromJSON(json.unencryptedLogs), + revertReason: json.revertReason, + constants: CombinedConstantData.fromBuffer(Buffer.from(json.constants, 'hex')), + end: CombinedAccumulatedData.fromBuffer(Buffer.from(json.end, 'hex')), + publicReturnValues: json.publicReturnValues?.map(Fr.fromString), + }; +} + +// REFACTOR: Review what we need to expose to the user when running a simulation. +// Eg tx already has encrypted and unencrypted logs, but those cover only the ones +// emitted during private. We need the ones from ProcessOutput to include the public +// ones as well. However, those would only be present if the user chooses to simulate +// the public side of things. 
This also points at this class needing to be split into +// two: one with just private simulation, and one that also includes public simulation. export class SimulatedTx { - constructor( - public tx: Tx, - public privateReturnValues?: ProcessReturnValues, - public publicReturnValues?: ProcessReturnValues, - ) {} + constructor(public tx: Tx, public privateReturnValues?: ProcessReturnValues, public publicOutput?: ProcessOutput) {} /** * Convert a SimulatedTx class object to a plain JSON object. * @returns A plain object with SimulatedTx properties. */ public toJSON() { - const returnToJson = (data: ProcessReturnValues | undefined): string => { - if (data === undefined) { - return JSON.stringify(data); - } - return JSON.stringify(data.map(fr => fr.toString())); - }; - return { tx: this.tx.toJSON(), - privateReturnValues: returnToJson(this.privateReturnValues), - publicReturnValues: returnToJson(this.publicReturnValues), + privateReturnValues: this.privateReturnValues?.map(fr => fr.toString()), + publicOutput: this.publicOutput && processOutputToJSON(this.publicOutput), }; } @@ -36,17 +63,10 @@ export class SimulatedTx { * @returns A Tx class object. */ public static fromJSON(obj: any) { - const returnFromJson = (json: string): ProcessReturnValues | undefined => { - if (json === undefined) { - return json; - } - return JSON.parse(json).map(Fr.fromString); - }; - const tx = Tx.fromJSON(obj.tx); - const privateReturnValues = returnFromJson(obj.privateReturnValues); - const publicReturnValues = returnFromJson(obj.publicReturnValues); + const publicOutput = obj.publicOutput ? processOutputFromJSON(obj.publicOutput) : undefined; + const privateReturnValues = obj.privateReturnValues?.map(Fr.fromString); - return new SimulatedTx(tx, privateReturnValues, publicReturnValues); + return new SimulatedTx(tx, privateReturnValues, publicOutput); } } diff --git a/yarn-project/circuits.js/src/structs/gas.ts b/yarn-project/circuits.js/src/structs/gas.ts index 8380f362e25..b1e2a844c88 100644 --- a/yarn-project/circuits.js/src/structs/gas.ts +++ b/yarn-project/circuits.js/src/structs/gas.ts @@ -1,9 +1,10 @@ -import { type Fr } from '@aztec/foundation/fields'; +import { Fr } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer, serializeToFields } from '@aztec/foundation/serialize'; import { type FieldsOf } from '@aztec/foundation/types'; import { inspect } from 'util'; +import { type GasFees } from './gas_fees.js'; import { type UInt32 } from './shared.js'; export const GasDimensions = ['da', 'l1', 'l2'] as const; @@ -25,8 +26,8 @@ export class Gas { return this.daGas === other.daGas && this.l1Gas === other.l1Gas && this.l2Gas === other.l2Gas; } - static from(fields: FieldsOf) { - return new Gas(fields.daGas, fields.l1Gas, fields.l2Gas); + static from(fields: Partial>) { + return new Gas(fields.daGas ?? 0, fields.l1Gas ?? 0, fields.l2Gas ?? 
0); } static empty() { @@ -67,6 +68,13 @@ export class Gas { return new Gas(Math.ceil(this.daGas * scalar), Math.ceil(this.l1Gas * scalar), Math.ceil(this.l2Gas * scalar)); } + computeFee(gasFees: GasFees) { + return GasDimensions.reduce( + (acc, dimension) => acc.add(gasFees.get(dimension).mul(new Fr(this.get(dimension)))), + Fr.ZERO, + ); + } + toFields() { return serializeToFields(this.daGas, this.l1Gas, this.l2Gas); } diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index c6a1a81b2ef..52a9b1e1d0a 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -169,7 +169,7 @@ export function makeNewSideEffectLinkedToNoteHash(seed: number): SideEffectLinke * @param seed - The seed to use for generating the tx context. * @returns A tx context. */ -export function makeTxContext(seed: number): TxContext { +export function makeTxContext(seed: number = 1): TxContext { // @todo @LHerskind should probably take value for chainId as it will be verified later. return new TxContext(new Fr(seed), Fr.ZERO, makeGasSettings()); } @@ -292,6 +292,10 @@ export function makeRollupValidationRequests(seed = 1) { return new RollupValidationRequests(new MaxBlockNumber(true, new Fr(seed + 0x31415))); } +export function makeCombinedConstantData(seed = 1): CombinedConstantData { + return new CombinedConstantData(makeHeader(seed), makeTxContext(seed + 0x100)); +} + /** * Creates arbitrary accumulated data. * @param seed - The seed to use for generating the accumulated data. diff --git a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts index ac36f13ceaf..c4d153b3bed 100644 --- a/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts +++ b/yarn-project/end-to-end/src/e2e_avm_simulator.test.ts @@ -1,4 +1,5 @@ import { type AccountWallet, AztecAddress, Fr, FunctionSelector, TxStatus } from '@aztec/aztec.js'; +import { GasSettings } from '@aztec/circuits.js'; import { AvmAcvmInteropTestContract, AvmInitializerTestContract, @@ -32,6 +33,21 @@ describe('e2e_avm_simulator', () => { avmContract = await AvmTestContract.deploy(wallet).send().deployed(); }, 50_000); + describe('Gas metering', () => { + it('Tracks L2 gas usage on simulation', async () => { + const request = await avmContract.methods.add_args_return(20n, 30n).create(); + const simulation = await wallet.simulateTx(request, true, wallet.getAddress()); + // Subtract the teardown gas allocation from the gas used to figure out the gas used by the contract logic. + const l2TeardownAllocation = GasSettings.simulation().getTeardownLimits().l2Gas; + const l2GasUsed = simulation.publicOutput!.end.gasUsed.l2Gas! - l2TeardownAllocation; + // L2 gas used will vary a lot depending on codegen and other factors, + // so we just set a wide range for it, and check it's not a suspiciously round number. + expect(l2GasUsed).toBeGreaterThan(1e3); + expect(l2GasUsed).toBeLessThan(1e6); + expect(l2GasUsed! 
% 1000).not.toEqual(0); + }); + }); + describe('Storage', () => { it('Modifies storage (Field)', async () => { await avmContract.methods.set_storage_single(20n).send().wait(); diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 42ecc4985c0..0fe75860cc0 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -1,5 +1,6 @@ import { type BlockProver, type ProcessedTx, type Tx, type TxValidator } from '@aztec/circuit-types'; -import { GlobalVariables, Header } from '@aztec/circuits.js'; +import { type Gas, GlobalVariables, Header, type TxContext } from '@aztec/circuits.js'; +import { type Fr } from '@aztec/foundation/fields'; import { type DebugLogger } from '@aztec/foundation/log'; import { openTmpStore } from '@aztec/kv-store/utils'; import { @@ -121,11 +122,22 @@ export class TestContext { blockProver?: BlockProver, txValidator?: TxValidator, ) { - const defaultExecutorImplementation = (execution: PublicExecution, _1: GlobalVariables, _2?: number) => { + const defaultExecutorImplementation = ( + execution: PublicExecution, + _globalVariables: GlobalVariables, + availableGas: Gas, + _txContext: TxContext, + transactionFee?: Fr, + _sideEffectCounter?: number, + ) => { for (const tx of txs) { for (const request of tx.enqueuedPublicFunctionCalls) { if (execution.contractAddress.equals(request.contractAddress)) { - const result = PublicExecutionResultBuilder.fromPublicCallRequest({ request }).build(); + const result = PublicExecutionResultBuilder.fromPublicCallRequest({ request }).build({ + startGasLeft: availableGas, + endGasLeft: availableGas, + transactionFee, + }); // result.unencryptedLogs = tx.unencryptedLogs.functionLogs[0]; return Promise.resolve(result); } @@ -150,6 +162,9 @@ export class TestContext { executorMock?: ( execution: PublicExecution, globalVariables: GlobalVariables, + availableGas: Gas, + txContext: TxContext, + transactionFee?: Fr, sideEffectCounter?: number, ) => Promise, ) { diff --git a/yarn-project/pxe/src/pxe_service/pxe_service.ts b/yarn-project/pxe/src/pxe_service/pxe_service.ts index c3d5de0c33d..e87a447f81b 100644 --- a/yarn-project/pxe/src/pxe_service/pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/pxe_service.ts @@ -420,7 +420,7 @@ export class PXEService implements PXE { txRequest: TxExecutionRequest, simulatePublic: boolean, msgSender: AztecAddress | undefined = undefined, - ) { + ): Promise { if (!txRequest.functionData.isPrivate) { throw new Error(`Public entrypoints are not allowed`); } @@ -441,7 +441,7 @@ export class PXEService implements PXE { } if (simulatePublic) { - simulatedTx.publicReturnValues = await this.#simulatePublicCalls(simulatedTx.tx); + simulatedTx.publicOutput = await this.#simulatePublicCalls(simulatedTx.tx); } if (!msgSender) { diff --git a/yarn-project/simulator/src/mocks/fixtures.ts b/yarn-project/simulator/src/mocks/fixtures.ts index 2930fd627d0..f7fd2d72562 100644 --- a/yarn-project/simulator/src/mocks/fixtures.ts +++ b/yarn-project/simulator/src/mocks/fixtures.ts @@ -102,7 +102,7 @@ export class PublicExecutionResultBuilder { return this; } - build(): PublicExecutionResult { + build(overrides: Partial = {}): PublicExecutionResult { return { execution: this._execution, nestedExecutions: this._nestedExecutions, @@ -120,7 +120,10 @@ export class PublicExecutionResultBuilder { endSideEffectCounter: Fr.ZERO, reverted: this._reverted, revertReason: this._revertReason, - gasLeft: 
Gas.test(), // TODO(palla/gas): Set a proper value + startGasLeft: Gas.test(), + endGasLeft: Gas.test(), + transactionFee: Fr.ZERO, + ...overrides, }; } } diff --git a/yarn-project/simulator/src/public/abstract_phase_manager.ts b/yarn-project/simulator/src/public/abstract_phase_manager.ts index 4564b7d4130..27f513dd4d2 100644 --- a/yarn-project/simulator/src/public/abstract_phase_manager.ts +++ b/yarn-project/simulator/src/public/abstract_phase_manager.ts @@ -227,6 +227,9 @@ export abstract class AbstractPhaseManager { const newUnencryptedFunctionLogs: UnencryptedFunctionL2Logs[] = []; + // Transaction fee is zero for all phases except teardown + const transactionFee = this.getTransactionFee(tx, previousPublicKernelOutput); + // TODO(#1684): Should multiple separately enqueued public calls be treated as // separate public callstacks to be proven by separate public kernel sequences // and submitted separately to the base rollup? @@ -243,9 +246,17 @@ export abstract class AbstractPhaseManager { const current = executionStack.pop()!; const isExecutionRequest = !isPublicExecutionResult(current); const sideEffectCounter = lastSideEffectCounter(tx) + 1; + const availableGas = this.getAvailableGas(tx, previousPublicKernelOutput); const result = isExecutionRequest - ? await this.publicExecutor.simulate(current, this.globalVariables, sideEffectCounter) + ? await this.publicExecutor.simulate( + current, + this.globalVariables, + availableGas, + tx.data.constants.txContext, + transactionFee, + sideEffectCounter, + ) : current; const functionSelector = result.execution.functionData.selector.toString(); @@ -308,6 +319,17 @@ export abstract class AbstractPhaseManager { return [publicKernelInputs, kernelOutput, kernelProof, newUnencryptedFunctionLogs, undefined, returns]; } + protected getAvailableGas(tx: Tx, previousPublicKernelOutput: PublicKernelCircuitPublicInputs) { + return tx.data.constants.txContext.gasSettings + .getLimits() // No need to subtract teardown limits since they are already included in end.gasUsed + .sub(previousPublicKernelOutput.end.gasUsed) + .sub(previousPublicKernelOutput.endNonRevertibleData.gasUsed); + } + + protected getTransactionFee(_tx: Tx, _previousPublicKernelOutput: PublicKernelCircuitPublicInputs) { + return Fr.ZERO; + } + protected async runKernelCircuit( previousOutput: PublicKernelCircuitPublicInputs, previousProof: Proof, @@ -348,7 +370,7 @@ export abstract class AbstractPhaseManager { return new PublicKernelData(previousOutput, previousProof, vk, vkIndex, vkSiblingPath); } - protected async getPublicCircuitPublicInputs(result: PublicExecutionResult) { + protected async getPublicCallStackItem(result: PublicExecutionResult, isExecutionRequest = false) { const publicDataTreeInfo = await this.db.getTreeInfo(MerkleTreeId.PUBLIC_DATA_TREE); this.historicalHeader.state.partial.publicDataTree.root = Fr.fromBuffer(publicDataTreeInfo.root); @@ -361,7 +383,7 @@ export abstract class AbstractPhaseManager { const unencryptedLogPreimagesLength = new Fr(result.unencryptedLogs.getSerializedLength()); - return PublicCircuitPublicInputs.from({ + const publicCircuitPublicInputs = PublicCircuitPublicInputs.from({ callContext: result.execution.callContext, proverAddress: AztecAddress.ZERO, argsHash: computeVarArgsHash(result.execution.args), @@ -399,20 +421,17 @@ export abstract class AbstractPhaseManager { ), unencryptedLogPreimagesLength, historicalHeader: this.historicalHeader, + startGasLeft: Gas.from(result.startGasLeft), + endGasLeft: Gas.from(result.endGasLeft), + 
transactionFee: result.transactionFee, // TODO(@just-mitch): need better mapping from simulator to revert code. revertCode: result.reverted ? RevertCode.REVERTED : RevertCode.OK, - // TODO(palla/gas): Set proper values - startGasLeft: Gas.test(), - endGasLeft: Gas.test(), - transactionFee: Fr.ZERO, }); - } - protected async getPublicCallStackItem(result: PublicExecutionResult, isExecutionRequest = false) { return new PublicCallStackItem( result.execution.contractAddress, result.execution.functionData, - await this.getPublicCircuitPublicInputs(result), + publicCircuitPublicInputs, isExecutionRequest, ); } diff --git a/yarn-project/simulator/src/public/execution.ts b/yarn-project/simulator/src/public/execution.ts index 871002e9635..25523f24a67 100644 --- a/yarn-project/simulator/src/public/execution.ts +++ b/yarn-project/simulator/src/public/execution.ts @@ -61,8 +61,12 @@ export interface PublicExecutionResult { * The revert reason if the execution reverted. */ revertReason: SimulationError | undefined; + /** How much gas was available for this public execution. */ + startGasLeft: Gas; /** How much gas was left after this public execution. */ - gasLeft: Gas; // TODO(palla/gas): Check this field + endGasLeft: Gas; + /** Transaction fee set for this tx. */ + transactionFee: Fr; } /** diff --git a/yarn-project/simulator/src/public/executor.ts b/yarn-project/simulator/src/public/executor.ts index 57710e7c5a8..9e5c3a098c1 100644 --- a/yarn-project/simulator/src/public/executor.ts +++ b/yarn-project/simulator/src/public/executor.ts @@ -1,5 +1,12 @@ import { UnencryptedFunctionL2Logs } from '@aztec/circuit-types'; -import { Fr, Gas, type GlobalVariables, type Header, PublicCircuitPublicInputs } from '@aztec/circuits.js'; +import { + Fr, + Gas, + type GlobalVariables, + type Header, + PublicCircuitPublicInputs, + type TxContext, +} from '@aztec/circuits.js'; import { createDebugLogger } from '@aztec/foundation/log'; import { spawn } from 'child_process'; @@ -64,9 +71,11 @@ async function executePublicFunctionAvm(executionContext: PublicExecutionContext executionContext.execution, executionContext.header, executionContext.globalVariables, + executionContext.gasSettings, + executionContext.transactionFee, ); - const machineState = new AvmMachineState(Gas.test()); // TODO(palla/gas): Set proper values + const machineState = new AvmMachineState(executionContext.availableGas); const context = new AvmContext(worldStateJournal, executionEnv, machineState); const simulator = new AvmSimulator(context); @@ -152,7 +161,9 @@ async function executePublicFunctionAcvm( unencryptedLogs: UnencryptedFunctionL2Logs.empty(), reverted, revertReason, - gasLeft: Gas.empty(), + startGasLeft: context.availableGas, + endGasLeft: Gas.empty(), + transactionFee: context.transactionFee, }; } @@ -196,7 +207,8 @@ async function executePublicFunctionAcvm( const nestedExecutions = context.getNestedExecutions(); const unencryptedLogs = context.getUnencryptedLogs(); - const gasLeft = Gas.test(); // TODO(palla/gas): Set proper value + const startGasLeft = context.availableGas; + const endGasLeft = context.availableGas; // No gas consumption in non-AVM return { execution, @@ -215,7 +227,9 @@ async function executePublicFunctionAcvm( unencryptedLogs, reverted: false, revertReason: undefined, - gasLeft, + startGasLeft, + endGasLeft, + transactionFee: context.transactionFee, }; } @@ -240,6 +254,9 @@ export class PublicExecutor { public async simulate( execution: PublicExecution, globalVariables: GlobalVariables, + availableGas: Gas, + 
txContext: TxContext, + transactionFee: Fr = Fr.ZERO, sideEffectCounter: number = 0, ): Promise { // Functions can request to pack arguments before calling other functions. @@ -255,6 +272,9 @@ export class PublicExecutor { this.stateDb, this.contractsDb, this.commitmentsDb, + availableGas, + transactionFee, + txContext.gasSettings, ); const executionResult = await executePublicFunction(context, /*nested=*/ false); diff --git a/yarn-project/simulator/src/public/index.test.ts b/yarn-project/simulator/src/public/index.test.ts index 8e8fb651afe..644b1e970ea 100644 --- a/yarn-project/simulator/src/public/index.test.ts +++ b/yarn-project/simulator/src/public/index.test.ts @@ -3,6 +3,7 @@ import { AppendOnlyTreeSnapshot, CallContext, FunctionData, + Gas, GasFees, GlobalVariables, type Header, @@ -13,7 +14,7 @@ import { NullifierLeafPreimage, } from '@aztec/circuits.js'; import { computeInnerNoteHash, computeNoteContentHash, siloNullifier } from '@aztec/circuits.js/hash'; -import { makeHeader } from '@aztec/circuits.js/testing'; +import { makeHeader, makeTxContext } from '@aztec/circuits.js/testing'; import { type FunctionArtifact, FunctionSelector, encodeArguments } from '@aztec/foundation/abi'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { pedersenHash, randomInt } from '@aztec/foundation/crypto'; @@ -97,6 +98,9 @@ describe('ACIR public execution simulator', () => { ...overrides, }); + const simulate = (execution: PublicExecution, globalVariables: GlobalVariables) => + executor.simulate(execution, globalVariables, Gas.test(), makeTxContext(), Fr.ZERO); + describe('Token contract', () => { let recipient: AztecAddress; let contractAddress: AztecAddress; @@ -130,7 +134,7 @@ describe('ACIR public execution simulator', () => { .mockResolvedValueOnce(previousTotalSupply); // reading total supply const execution: PublicExecution = { contractAddress, functionData, args, callContext }; - const result = await executor.simulate(execution, globalVariables); + const result = await simulate(execution, globalVariables); expect(result.revertReason).toBeUndefined(); const recipientBalanceStorageSlot = computeSlotForMapping(new Fr(6n), recipient); @@ -212,7 +216,7 @@ describe('ACIR public execution simulator', () => { const recipientBalance = new Fr(20n); mockStore(senderBalance, recipientBalance); - const result = await executor.simulate(execution, globalVariables); + const result = await simulate(execution, globalVariables); const expectedRecipientBalance = new Fr(160n); const expectedSenderBalance = new Fr(60n); @@ -240,7 +244,7 @@ describe('ACIR public execution simulator', () => { const recipientBalance = new Fr(20n); mockStore(senderBalance, recipientBalance); - const { reverted, revertReason } = await executor.simulate(execution, globalVariables); + const { reverted, revertReason } = await simulate(execution, globalVariables); expect(reverted).toBe(true); expect(revertReason?.message).toMatch('Assertion failed: attempt to subtract with underflow'); }); @@ -289,7 +293,7 @@ describe('ACIR public execution simulator', () => { new GasFees(new Fr(10), new Fr(11), new Fr(12)), ); - const result = await executor.simulate(execution, globalVariables); + const result = await simulate(execution, globalVariables); expect(result.returnValues[0]).toEqual( new Fr( initialValue + @@ -329,7 +333,7 @@ describe('ACIR public execution simulator', () => { publicState.storageRead.mockResolvedValue(amount); const execution: PublicExecution = { contractAddress, functionData, args, callContext }; - const 
result = await executor.simulate(execution, globalVariables); + const result = await simulate(execution, globalVariables); // Assert the note hash was created expect(result.newNoteHashes.length).toEqual(1); @@ -352,7 +356,7 @@ describe('ACIR public execution simulator', () => { publicContracts.getBytecode.mockResolvedValue(createL2ToL1MessagePublicArtifact.bytecode); const execution: PublicExecution = { contractAddress, functionData, args, callContext }; - const result = await executor.simulate(execution, globalVariables); + const result = await simulate(execution, globalVariables); // Assert the l2 to l1 message was created expect(result.newL2ToL1Messages.length).toEqual(1); @@ -374,7 +378,7 @@ describe('ACIR public execution simulator', () => { publicContracts.getBytecode.mockResolvedValue(createNullifierPublicArtifact.bytecode); const execution: PublicExecution = { contractAddress, functionData, args, callContext }; - const result = await executor.simulate(execution, globalVariables); + const result = await simulate(execution, globalVariables); // Assert the l2 to l1 message was created expect(result.newNullifiers.length).toEqual(1); @@ -476,7 +480,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - const result = await executor.simulate(execution, globalVariables); + const result = await simulate(execution, globalVariables); expect(result.newNullifiers.length).toEqual(1); }); @@ -493,7 +497,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - const { revertReason, reverted } = await executor.simulate(execution, globalVariables); + const { revertReason, reverted } = await simulate(execution, globalVariables); expect(reverted).toBe(true); expect(revertReason?.message).toMatch(`Message not in state`); }); @@ -510,7 +514,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - const { revertReason, reverted } = await executor.simulate(execution, globalVariables); + const { revertReason, reverted } = await simulate(execution, globalVariables); expect(reverted).toBe(true); expect(revertReason?.message).toMatch(`Message not in state`); }); @@ -527,7 +531,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - const { revertReason, reverted } = await executor.simulate(execution, globalVariables); + const { revertReason, reverted } = await simulate(execution, globalVariables); expect(reverted).toBe(true); expect(revertReason?.message).toMatch(`Message not in state`); }); @@ -544,7 +548,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - const { revertReason, reverted } = await executor.simulate(execution, globalVariables); + const { revertReason, reverted } = await simulate(execution, globalVariables); 
expect(reverted).toBe(true); expect(revertReason?.message).toMatch(`Message not in state`); }); @@ -561,7 +565,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - const { revertReason, reverted } = await executor.simulate(execution, globalVariables); + const { revertReason, reverted } = await simulate(execution, globalVariables); expect(reverted).toBe(true); expect(revertReason?.message).toMatch(`Message not in state`); }); @@ -579,7 +583,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - const { revertReason, reverted } = await executor.simulate(execution, globalVariables); + const { revertReason, reverted } = await simulate(execution, globalVariables); expect(reverted).toBe(true); expect(revertReason?.message).toMatch(`Message not in state`); }); @@ -597,7 +601,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - const { revertReason, reverted } = await executor.simulate(execution, globalVariables); + const { revertReason, reverted } = await simulate(execution, globalVariables); expect(reverted).toBe(true); expect(revertReason?.message).toMatch(`Message not in state`); }); @@ -664,7 +668,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - expect(() => executor.simulate(execution, globalVariables)).not.toThrow(); + expect(() => simulate(execution, globalVariables)).not.toThrow(); }); it('Invalid', async () => { @@ -680,7 +684,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - const { revertReason, reverted } = await executor.simulate(execution, globalVariables); + const { revertReason, reverted } = await simulate(execution, globalVariables); expect(reverted).toBe(true); expect(revertReason?.message).toMatch(`Invalid ${description.toLowerCase()}`); }); @@ -711,7 +715,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - expect(() => executor.simulate(execution, globalVariables)).not.toThrow(); + expect(() => simulate(execution, globalVariables)).not.toThrow(); }); it('Throws when header is not as expected', async () => { @@ -721,7 +725,7 @@ describe('ACIR public execution simulator', () => { const execution: PublicExecution = { contractAddress, functionData, args, callContext }; executor = new PublicExecutor(publicState, publicContracts, commitmentsDb, header); - const { revertReason, reverted } = await executor.simulate(execution, globalVariables); + const { revertReason, reverted } = await simulate(execution, globalVariables); expect(reverted).toBe(true); expect(revertReason?.message).toMatch(`Invalid header hash`); }); diff 
--git a/yarn-project/simulator/src/public/public_execution_context.ts b/yarn-project/simulator/src/public/public_execution_context.ts index c3ac433ce29..7f16fd7a19b 100644 --- a/yarn-project/simulator/src/public/public_execution_context.ts +++ b/yarn-project/simulator/src/public/public_execution_context.ts @@ -3,7 +3,8 @@ import { CallContext, FunctionData, type FunctionSelector, - Gas, + type Gas, + type GasSettings, type GlobalVariables, type Header, PublicContextInputs, @@ -40,6 +41,9 @@ export class PublicExecutionContext extends TypedOracle { public readonly stateDb: PublicStateDB, public readonly contractsDb: PublicContractsDB, public readonly commitmentsDb: CommitmentsDB, + public readonly availableGas: Gas, + public readonly transactionFee: Fr, + public readonly gasSettings: GasSettings, private log = createDebugLogger('aztec:simulator:public_execution_context'), ) { super(); @@ -62,8 +66,8 @@ export class PublicExecutionContext extends TypedOracle { this.header, this.globalVariables, this.sideEffectCounter.current(), - Gas.test(), // TODO(palla/gas): Set proper values - new Fr(0), + this.availableGas, + this.transactionFee, ); const fields = [...publicContextInputs.toFields(), ...args]; return toACVMWitness(witnessStartIndex, fields); @@ -222,6 +226,9 @@ export class PublicExecutionContext extends TypedOracle { this.stateDb, this.contractsDb, this.commitmentsDb, + this.availableGas, + this.transactionFee, + this.gasSettings, this.log, ); diff --git a/yarn-project/simulator/src/public/public_processor.test.ts b/yarn-project/simulator/src/public/public_processor.test.ts index 904321e2d6c..7f52b6352f9 100644 --- a/yarn-project/simulator/src/public/public_processor.test.ts +++ b/yarn-project/simulator/src/public/public_processor.test.ts @@ -12,6 +12,9 @@ import { AppendOnlyTreeSnapshot, ContractStorageUpdateRequest, Fr, + Gas, + GasFees, + GasSettings, GlobalVariables, Header, PUBLIC_DATA_TREE_HEIGHT, @@ -25,6 +28,7 @@ import { import { computePublicDataTreeLeafSlot } from '@aztec/circuits.js/hash'; import { fr, makeAztecAddress, makePublicCallRequest, makeSelector } from '@aztec/circuits.js/testing'; import { arrayNonEmptyLength } from '@aztec/foundation/collection'; +import { type FieldsOf } from '@aztec/foundation/types'; import { openTmpStore } from '@aztec/kv-store/utils'; import { type AppendOnlyTree, Pedersen, StandardTree, newTree } from '@aztec/merkle-tree'; import { type PublicExecutionResult, type PublicExecutor, WASMSimulator } from '@aztec/simulator'; @@ -197,7 +201,7 @@ describe('public_processor', () => { db, publicExecutor, publicKernel, - GlobalVariables.empty(), + GlobalVariables.from({ ...GlobalVariables.empty(), gasFees: GasFees.default() }), header, publicContractsDB, publicWorldStateDB, @@ -345,6 +349,9 @@ describe('public_processor', () => { publicCallRequests, }); + const teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); + const teardownResultSettings = { startGasLeft: teardownGas, endGasLeft: teardownGas }; + const contractSlotA = fr(0x100); const contractSlotB = fr(0x150); const contractSlotC = fr(0x200); @@ -389,9 +396,9 @@ describe('public_processor', () => { contractStorageUpdateRequests: [ new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 12, baseContractAddress), ], - }).build(), + }).build(teardownResultSettings), ], - }).build(), + }).build(teardownResultSettings), ]; publicExecutor.simulate.mockImplementation(execution => { @@ -551,6 +558,9 @@ describe('public_processor', () => { publicCallRequests, }); + const 
teardownGas = tx.data.constants.txContext.gasSettings.getTeardownLimits(); + const teardownResultSettings = { startGasLeft: teardownGas, endGasLeft: teardownGas }; + const contractSlotA = fr(0x100); const contractSlotB = fr(0x150); const contractSlotC = fr(0x200); @@ -588,16 +598,16 @@ describe('public_processor', () => { from: publicCallRequests[1].contractAddress, tx: makeFunctionCall(baseContractAddress, makeSelector(5)), revertReason: new SimulationError('Simulation Failed', []), - }).build(), + }).build(teardownResultSettings), PublicExecutionResultBuilder.fromFunctionCall({ from: publicCallRequests[1].contractAddress, tx: makeFunctionCall(baseContractAddress, makeSelector(5)), contractStorageUpdateRequests: [ new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 14, baseContractAddress), ], - }).build(), + }).build(teardownResultSettings), ], - }).build(), + }).build(teardownResultSettings), ]; publicExecutor.simulate.mockImplementation(execution => { @@ -648,15 +658,44 @@ describe('public_processor', () => { publicCallRequests, }); - // const baseContractAddress = makeAztecAddress(30); + const gasLimits = Gas.from({ l1Gas: 1e9, l2Gas: 1e9, daGas: 1e9 }); + const teardownGas = Gas.from({ l1Gas: 1e7, l2Gas: 1e7, daGas: 1e7 }); + tx.data.constants.txContext.gasSettings = GasSettings.from({ + gasLimits: gasLimits, + teardownGasLimits: teardownGas, + inclusionFee: new Fr(1e4), + maxFeesPerGas: { feePerDaGas: new Fr(10), feePerL1Gas: new Fr(10), feePerL2Gas: new Fr(10) }, + }); + + // Private kernel tail to public pushes teardown gas allocation into revertible gas used + tx.data.forPublic!.end.gasUsed = teardownGas; + tx.data.forPublic!.endNonRevertibleData.gasUsed = Gas.empty(); + const contractSlotA = fr(0x100); const contractSlotB = fr(0x150); const contractSlotC = fr(0x200); let simulatorCallCount = 0; + + const initialGas = gasLimits.sub(teardownGas); + const afterSetupGas = initialGas.sub(Gas.from({ l2Gas: 1e6 })); + const afterAppGas = afterSetupGas.sub(Gas.from({ l2Gas: 2e6, daGas: 2e6 })); + const afterTeardownGas = teardownGas.sub(Gas.from({ l2Gas: 3e6, daGas: 3e6 })); + + // Total gas used is the sum of teardown gas allocation plus all expenditures along the way, + // without including the gas used in the teardown phase (since that's consumed entirely up front). 
+ const expectedTotalGasUsed = { l2Gas: 1e7 + 1e6 + 2e6, daGas: 1e7 + 2e6, l1Gas: 1e7 }; + + // Inclusion fee plus block gas fees times total gas used + const expectedTxFee = 1e4 + (1e7 + 1e6 + 2e6) * 1 + (1e7 + 2e6) * 1 + 1e7 * 1; + const transactionFee = new Fr(expectedTxFee); + const simulatorResults: PublicExecutionResult[] = [ // Setup - PublicExecutionResultBuilder.fromPublicCallRequest({ request: publicCallRequests[0] }).build(), + PublicExecutionResultBuilder.fromPublicCallRequest({ request: publicCallRequests[0] }).build({ + startGasLeft: initialGas, + endGasLeft: afterSetupGas, + }), // App Logic PublicExecutionResultBuilder.fromPublicCallRequest({ @@ -665,7 +704,10 @@ describe('public_processor', () => { new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 14, baseContractAddress), new ContractStorageUpdateRequest(contractSlotB, fr(0x151), 15, baseContractAddress), ], - }).build(), + }).build({ + startGasLeft: afterSetupGas, + endGasLeft: afterAppGas, + }), // Teardown PublicExecutionResultBuilder.fromPublicCallRequest({ @@ -678,21 +720,26 @@ describe('public_processor', () => { new ContractStorageUpdateRequest(contractSlotA, fr(0x101), 11, baseContractAddress), new ContractStorageUpdateRequest(contractSlotC, fr(0x201), 12, baseContractAddress), ], - }).build(), + }).build({ startGasLeft: teardownGas, endGasLeft: teardownGas, transactionFee }), PublicExecutionResultBuilder.fromFunctionCall({ from: publicCallRequests[1].contractAddress, tx: makeFunctionCall(baseContractAddress, makeSelector(5)), contractStorageUpdateRequests: [ new ContractStorageUpdateRequest(contractSlotA, fr(0x102), 13, baseContractAddress), ], - }).build(), + }).build({ startGasLeft: teardownGas, endGasLeft: teardownGas, transactionFee }), ], - }).build(), + }).build({ + startGasLeft: teardownGas, + endGasLeft: afterTeardownGas, + transactionFee, + }), ]; publicExecutor.simulate.mockImplementation(execution => { if (simulatorCallCount < simulatorResults.length) { - return Promise.resolve(simulatorResults[simulatorCallCount++]); + const result = simulatorResults[simulatorCallCount++]; + return Promise.resolve(result); } else { throw new Error(`Unexpected execution request: ${execution}, call count: ${simulatorCallCount}`); } @@ -711,12 +758,28 @@ describe('public_processor', () => { expect(setupSpy).toHaveBeenCalledTimes(1); expect(appLogicSpy).toHaveBeenCalledTimes(1); expect(teardownSpy).toHaveBeenCalledTimes(3); + + const expectedSimulateCall = (availableGas: Partial>, txFee: number) => [ + expect.anything(), // PublicExecution + expect.anything(), // GlobalVariables + Gas.from(availableGas), + expect.anything(), // TxContext + new Fr(txFee), + expect.anything(), // SideEffectCounter + ]; + expect(publicExecutor.simulate).toHaveBeenCalledTimes(3); + expect(publicExecutor.simulate).toHaveBeenNthCalledWith(1, ...expectedSimulateCall(initialGas, 0)); + expect(publicExecutor.simulate).toHaveBeenNthCalledWith(2, ...expectedSimulateCall(afterSetupGas, 0)); + expect(publicExecutor.simulate).toHaveBeenNthCalledWith(3, ...expectedSimulateCall(teardownGas, expectedTxFee)); + expect(publicWorldStateDB.checkpoint).toHaveBeenCalledTimes(3); expect(publicWorldStateDB.rollbackToCheckpoint).toHaveBeenCalledTimes(0); expect(publicWorldStateDB.commit).toHaveBeenCalledTimes(1); expect(publicWorldStateDB.rollbackToCommit).toHaveBeenCalledTimes(0); + expect(processed[0].data.end.gasUsed).toEqual(Gas.from(expectedTotalGasUsed)); + const txEffect = toTxEffect(processed[0]); 
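    // Editor's note: illustrative cross-check, not part of this patch. Assuming
    // GasFees.default() is 1 per gas dimension (as this test's fee arithmetic implies),
    // the fee is the inclusion fee plus Gas.computeFee over the total gas used.
    expect(new Fr(1e4).add(Gas.from(expectedTotalGasUsed).computeFee(GasFees.default()))).toEqual(transactionFee);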
expect(arrayNonEmptyLength(txEffect.publicDataWrites, PublicDataWrite.isEmpty)).toEqual(3); expect(txEffect.publicDataWrites[0]).toEqual( diff --git a/yarn-project/simulator/src/public/teardown_phase_manager.ts b/yarn-project/simulator/src/public/teardown_phase_manager.ts index c5e84807227..6cec359c9c5 100644 --- a/yarn-project/simulator/src/public/teardown_phase_manager.ts +++ b/yarn-project/simulator/src/public/teardown_phase_manager.ts @@ -1,5 +1,7 @@ import { type PublicKernelRequest, PublicKernelType, type Tx } from '@aztec/circuit-types'; import { + type Fr, + type Gas, type GlobalVariables, type Header, type Proof, @@ -8,6 +10,8 @@ import { import { type PublicExecutor, type PublicStateDB } from '@aztec/simulator'; import { type MerkleTreeOperations } from '@aztec/world-state'; +import { inspect } from 'util'; + import { AbstractPhaseManager, PublicKernelPhase } from './abstract_phase_manager.js'; import { type ContractsDataSourcePublicDB } from './public_executor.js'; import { type PublicKernelCircuitSimulator } from './public_kernel_circuit_simulator.js'; @@ -63,4 +67,18 @@ export class TeardownPhaseManager extends AbstractPhaseManager { returnValues: undefined, }; } + + protected override getTransactionFee(tx: Tx, previousPublicKernelOutput: PublicKernelCircuitPublicInputs): Fr { + const gasSettings = tx.data.constants.txContext.gasSettings; + const gasFees = this.globalVariables.gasFees; + // No need to add teardown limits since they are already included in end.gasUsed + const gasUsed = previousPublicKernelOutput.end.gasUsed.add(previousPublicKernelOutput.endNonRevertibleData.gasUsed); + const txFee = gasSettings.inclusionFee.add(gasUsed.computeFee(gasFees)); + this.log.debug(`Computed tx fee`, { txFee, gasUsed: inspect(gasUsed), gasFees: inspect(gasFees) }); + return txFee; + } + + protected override getAvailableGas(tx: Tx, _previousPublicKernelOutput: PublicKernelCircuitPublicInputs): Gas { + return tx.data.constants.txContext.gasSettings.getTeardownLimits(); + } } diff --git a/yarn-project/simulator/src/public/transitional_adaptors.ts b/yarn-project/simulator/src/public/transitional_adaptors.ts index a3ba62e191f..437d7a4a529 100644 --- a/yarn-project/simulator/src/public/transitional_adaptors.ts +++ b/yarn-project/simulator/src/public/transitional_adaptors.ts @@ -5,7 +5,8 @@ import { ContractStorageRead, ContractStorageUpdateRequest, FunctionData, - GasSettings, + Gas, + type GasSettings, type GlobalVariables, type Header, L2ToL1Message, @@ -37,6 +38,8 @@ export function createAvmExecutionEnvironment( current: PublicExecution, header: Header, globalVariables: GlobalVariables, + gasSettings: GasSettings, + transactionFee: Fr, ): AvmExecutionEnvironment { return new AvmExecutionEnvironment( current.contractAddress, @@ -51,8 +54,8 @@ export function createAvmExecutionEnvironment( current.callContext.isStaticCall, current.callContext.isDelegateCall, current.args, - GasSettings.default(), // TODO(palla/gas): Set proper values - Fr.ZERO, // TODO(palla/gas): Set proper values + gasSettings, + transactionFee, current.functionData.selector, ); } @@ -85,6 +88,9 @@ export function createPublicExecutionContext(avmContext: AvmContext, calldata: F avmContext.persistableState.hostStorage.publicStateDb, avmContext.persistableState.hostStorage.contractsDb, avmContext.persistableState.hostStorage.commitmentsDb, + Gas.from(avmContext.machineState.gasLeft), + avmContext.environment.transactionFee, + avmContext.environment.gasSettings, ); return context; @@ -168,7 +174,9 @@ export async function 
convertAvmResults(
     unencryptedLogs,
     reverted: result.reverted,
     revertReason: result.revertReason ? createSimulationError(result.revertReason) : undefined,
-    gasLeft: endMachineState.gasLeft,
+    startGasLeft: executionContext.availableGas,
+    endGasLeft: endMachineState.gasLeft,
+    transactionFee: executionContext.transactionFee,
   };
 }

From 91cc0a424031b9b8346cc9182f303d1468b1179b Mon Sep 17 00:00:00 2001
From: James Zaki
Date: Thu, 25 Apr 2024 02:27:17 +0100
Subject: [PATCH 012/201] docs: Aztec smart contract tutorial - crowdfunding
 (#5786)

A tutorial to teach some of the fundamentals of building smart contracts on Aztec.

---------

Co-authored-by: josh crites
---
 .../functions/initializers.md                 |   3 +-
 .../tutorials/crowdfunding/donations.md       | 216 ++++++++++++++++++
 docs/sidebars.js                              |   2 +-
 .../crowdfunding_contract/src/main.nr         |  33 ++-
 4 files changed, 247 insertions(+), 7 deletions(-)
 create mode 100644 docs/docs/developers/tutorials/crowdfunding/donations.md

diff --git a/docs/docs/developers/contracts/writing_contracts/functions/initializers.md b/docs/docs/developers/contracts/writing_contracts/functions/initializers.md
index a673264a4b3..97499c4dd85 100644
--- a/docs/docs/developers/contracts/writing_contracts/functions/initializers.md
+++ b/docs/docs/developers/contracts/writing_contracts/functions/initializers.md
@@ -10,8 +10,7 @@ Initializers are regular functions that set an "initialized" flag (a nullifier)

 ## Annotate with `#[aztec(private)]` and `#[aztec(initializer)]`
-
-Define your initiaizer like so:
+Define your initializer like so:

 ```rust
 #[aztec(private)]
diff --git a/docs/docs/developers/tutorials/crowdfunding/donations.md b/docs/docs/developers/tutorials/crowdfunding/donations.md
new file mode 100644
index 00000000000..16dcfae5c58
--- /dev/null
+++ b/docs/docs/developers/tutorials/crowdfunding/donations.md
@@ -0,0 +1,216 @@
---
title: Build a donations contract
tags: [developers, tutorial, example]
---

# Build a donations contract

In this tutorial we'll create two contracts related to crowdfunding:

- A crowdfunding contract with two core components
  - Fully private donations
  - Verifiable withdrawals to the operator
- A reward contract for anyone else to anonymously reward donors

Along the way you will:

- Install Aztec developer tools
- Setup a new Noir contract project
- Add base Aztec dependencies
- Call between private and public contexts
- Wrap an address with its interface (token)
- Create custom private value notes

## Setup

### Install tools

Please ensure that you already have [Installed the Sandbox](https://docs.aztec.network/developers/getting_started/quickstart#install-the-sandbox).

And if using VSCode, see [here](https://docs.aztec.network/developers/contracts/main#install-noir-lsp-recommended) to install Noir LSP, where you'll benefit from syntax highlighting, profiling, and more.

### Create an Aztec project

Use `aztec-nargo` in a terminal to create a new Aztec contract project named "crowdfunding":

```sh
aztec-nargo new --contract crowdfunding
```

Inside the new `crowdfunding` directory you will have a base to implement the Aztec smart contract.

Use `aztec-nargo --help` to see other commands.

## Private donations

1. An "Operator" begins a Crowdfunding campaign (contract), specifying:

- an existing token address
- their account address
- a deadline timestamp

2. Any address can donate (in private context)

- private transfer token from sender to contract
- transaction receipts allow private claims via another contract

3. Only the operator can withdraw from the fund

### 1. Create a campaign

#### Initialize

Open the project in your preferred editor. If using VSCode and the LSP, you'll be able to select the `aztec-nargo` binary to use (instead of `nargo`).

In `main.nr`, rename the contract from `Main` to `Crowdfunding`.

#include_code empty-contract /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust

Replace the example functions with an initializer that takes the required campaign info as parameters. Notice how the `#[aztec(...)]` macros inform the compiler that the function is a public initializer.

```rust
#include_code init-header /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr raw
  //...
}
```

More about initializers [here](../../contracts/writing_contracts/functions/initializers.md).

#### Dependencies

When you compile the contracts by running `aztec-nargo compile` in your project directory, you'll notice it cannot resolve `AztecAddress` (or when hovering over it in VSCode).

```rust
#include_code init-header-error /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr raw
  //...
}
```

Add the required dependency by going to your project's `Nargo.toml` file, and adding `aztec` from the `aztec-nr` framework. It resides in the `aztec-packages` mono-repo:

```rust
[dependencies]
aztec = { git="https://github.com/AztecProtocol/aztec-packages/", tag="#include_aztec_version", directory="noir-projects/aztec-nr/aztec" }
```

A word about versions:

- Choose the aztec packages version to match your aztec tools as seen here - `aztec-cli -V`
- Check that your `compiler_version` in Nargo.toml is satisfied by your aztec compiler - `aztec-nargo -V`

More about versions [here](https://docs.aztec.network/developers/versions-updating).

Inside the Crowdfunding contract definition, use the dependency that defines the address type `AztecAddress` (same syntax as Rust):

```rust
use dep::aztec::protocol_types::address::AztecAddress;
```

The `aztec::protocol_types` can be browsed [here](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/noir-protocol-circuits/crates/types/src). And like Rust dependencies, the relative path inside the dependency corresponds to `address::AztecAddress`.

#### Storage

To retain the initializer parameters in the contract's Storage, we'll need to declare them in a preceding `Storage` struct:

#include_code storage /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust

The `ValueNote` type is in the top-level of the Aztec.nr framework, namely [noir-projects/aztec-nr](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/aztec-nr/value-note/src/value_note.nr). Like before, you'll need to add the crate to Nargo.toml.

(See [here](https://docs.aztec.network/developers/contracts/resources/dependencies) for common dependencies.)

---

Back in main.nr, reference `use` of the type:

```rust
use dep::value_note::value_note::ValueNote;
```

Now complete the initializer by setting the storage variables with the parameters:

#include_code init /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust

You can compile the code so far with `aztec-nargo compile`.
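As a sanity check from the TypeScript side, a deployment might look roughly like the sketch below. This is illustrative only: it assumes you have generated TypeScript bindings for the contract (producing a `CrowdfundingContract` class) and already have an `operatorWallet` and a deployed `donationToken`; adjust the names to your own setup.

```typescript
// Illustrative deployment sketch; CrowdfundingContract, operatorWallet and
// donationToken are assumed names from your own test harness.
const deadline = BigInt(Math.floor(Date.now() / 1000) + 7 * 24 * 60 * 60); // one week from now
const crowdfunding = await CrowdfundingContract.deploy(
  operatorWallet,
  donationToken.address, // donation_token
  operatorWallet.getAddress(), // operator
  deadline,
)
  .send()
  .deployed();
```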
### 2. Taking private donations

#### Checking campaign duration against the timestamp

To check that the donation occurs before the campaign deadline, we must access the public `timestamp`. It is one of several [Public Global Variables](https://docs.aztec.network/developers/contracts/references/globals#public-global-variables).

Declare an Aztec function that is public and internal:

```rust
#include_code deadline-header /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr raw
  //...
}
```

Read the deadline from storage and assert that the `timestamp` from this context is before the deadline:

#include_code deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust

---

Since donations are to be private, the donate function will have the user's private context, which has these [Private Global Variables](https://docs.aztec.network/developers/contracts/references/globals#private-global-variables). So from the private context there is a little extra work needed to call the (public internal) `_check_deadline` function.

```rust
#include_code call-check-deadline /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr raw
  //...
}
```

Namely, calling `enqueue` and passing the (mutable) context.

Now conclude adding all dependencies to the `Crowdfunding` contract:

#include_code all-deps /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust

Like before, you can find these and other `aztec::protocol_types` [here](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/noir-protocol-circuits/crates/types/src).

#### Interfacing with another contract

The token being used for donations is stored simply as an `AztecAddress` (named `donation_token`). So, to easily use it as a token, we let the compiler know that we want the address to have a Token interface. Here we will use a maintained example Token contract.

Add this `Token` contract to Nargo.toml:

```
token = { git="https://github.com/AztecProtocol/aztec-packages/", tag="#include_aztec_version", directory="noir-projects/noir-contracts/contracts/token_contract" }
```

With the dependency already `use`d at the start of the contract, the token contract can be called to make the transfer from the msg sender to this contract.

:::note
The user must have authorised this action (concept [here](../../../learn/concepts/accounts/main#authorizing-actions)); see an example use of `createAuthWit` in the 'full donor flow' test [here](../../../../../yarn-project/end-to-end/src/e2e_crowdfunding_and_claim.test.ts).
:::

#### Creating and storing a private receipt note

The last thing to do is create a new value note and add it to the `donation_receipts`. So the full donation function is now:

#include_code donate /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust

### 3. Operator withdrawals

The remaining function to implement, `withdraw`, is reasonably straightforward:

1. make sure the address calling is the operator address
2. transfer tokens from the contract to the operator
3. reveal that an amount has been withdrawn to the operator

The last point is achieved by emitting an unencrypted event log, more [here](https://docs.aztec.network/developers/contracts/writing_contracts/events/emit_event#unencrypted-events).
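Once `withdraw` (shown next) is in place, the full client-side flow can be exercised from aztec.js. The sketch below is illustrative only, assuming the wallet, token, and contract bindings named here; check the 'full donor flow' test linked above for the authoritative API usage, in particular the exact `createAuthWit` signature.

```typescript
// Donor side: authorise the Crowdfunding contract to pull tokens, then donate.
const action = token.withWallet(donorWallet).methods.transfer(
  donorWallet.getAddress(), // from
  crowdfunding.address, // to: the Crowdfunding contract
  donationAmount,
  0, // nonce referenced by the authwit
);
await donorWallet.createAuthWit({ caller: crowdfunding.address, action });
await crowdfunding.withWallet(donorWallet).methods.donate(donationAmount).send().wait();

// Operator side: only the operator can withdraw; this emits WithdrawalProcessed.
await crowdfunding.withWallet(operatorWallet).methods.withdraw(donationAmount).send().wait();
```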
+ +Copy the last function into your Crowdfunding contract: + +#include_code operator-withdrawals /noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr rust + +You should be able to compile successfully with `aztec-nargo compile`. + +## Conclusion + +For comparison, the full Crowdfunding contract can be found [here](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/noir-contracts/contracts/crowdfunding_contract). + +### Next steps? + +If a new token wishes to honour donors with free tokens based on donation amounts, this is possible via the donation_receipts (a `PrivateSet`). +See [claim_contract](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/noir-projects/noir-contracts/contracts/claim_contract). \ No newline at end of file diff --git a/docs/sidebars.js b/docs/sidebars.js index c10514ddbe9..0884c74caeb 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -228,7 +228,7 @@ const sidebars = { items: [ "developers/tutorials/writing_token_contract", "developers/tutorials/writing_private_voting_contract", - + "developers/tutorials/crowdfunding/donations", { label: "Writing a DApp", type: "category", diff --git a/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr b/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr index 71102fda1a9..3b1f7114772 100644 --- a/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/crowdfunding_contract/src/main.nr @@ -1,11 +1,15 @@ +// docs:start:empty-contract contract Crowdfunding { + // docs:end:empty-contract + // docs:start:all-deps use dep::aztec::{ protocol_types::{abis::function_selector::FunctionSelector, address::AztecAddress, traits::Serialize}, state_vars::{PrivateSet, PublicImmutable, SharedImmutable} }; use dep::value_note::value_note::ValueNote; use dep::token::Token; + // docs:end:all-deps #[aztec(event)] struct WithdrawalProcessed { @@ -19,6 +23,7 @@ contract Crowdfunding { } } + // docs:start:storage #[aztec(storage)] struct Storage { // Token used for donations (e.g. 
DAI) @@ -27,30 +32,46 @@ contract Crowdfunding { operator: SharedImmutable, // End of the crowdfunding campaign after which no more donations are accepted deadline: PublicImmutable, - // Notes emitted to donors when they donate (later on used to claim rewards in the Claim contract) - claim_notes: PrivateSet, + // Notes emitted to donors when they donate (can be used as proof to obtain rewards, eg in Claim contracts) + donation_receipts: PrivateSet, } + // docs:end:storage + // docs:start:init + // docs:start:init-header + // docs:start:init-header-error #[aztec(public)] #[aztec(initializer)] - fn constructor(donation_token: AztecAddress, operator: AztecAddress, deadline: u64) { + // this-will-error:init-header-error + fn init(donation_token: AztecAddress, operator: AztecAddress, deadline: u64) { + // docs:end:init-header + // docs:end:init-header-error storage.donation_token.initialize(donation_token); storage.operator.initialize(operator); storage.deadline.initialize(deadline); } + // docs:end:init + // docs:start:deadline + // docs:start:deadline-header #[aztec(public)] #[aztec(internal)] fn _check_deadline() { + // docs:end:deadline-header let deadline = storage.deadline.read(); assert(context.timestamp() < deadline, "Deadline has passed"); } + // docs:end:deadline + // docs:start:donate + // docs:start:call-check-deadline #[aztec(private)] fn donate(amount: u64) { // 1) Check that the deadline has not passed Crowdfunding::at(context.this_address())._check_deadline().enqueue(&mut context); + // docs:end:call-check-deadline + // docs:start:do-transfer // 2) Transfer the donation tokens from donor to this contract Token::at(storage.donation_token.read_private()).transfer( context.msg_sender(), @@ -58,13 +79,16 @@ contract Crowdfunding { amount as Field, 0 ).call(&mut context); + // docs:end:do-transfer // 3) Create a value note for the donor so that he can later on claim a rewards token in the Claim // contract by proving that the hash of this note exists in the note hash tree. let mut note = ValueNote::new(amount as Field, context.msg_sender()); - storage.claim_notes.insert(&mut note, true); + storage.donation_receipts.insert(&mut note, true); } + // docs:end:donate + // docs:start:operator-withdrawals // Withdraws balance to the operator. Requires that msg_sender() is the operator. #[aztec(private)] fn withdraw(amount: u64) { @@ -79,4 +103,5 @@ contract Crowdfunding { let event = WithdrawalProcessed { amount, who: operator_address }; context.emit_unencrypted_log(event.serialize()); } + // docs:end:operator-withdrawals } From a0fb4f5a4403806c42fb4dde7a95b13170e1274c Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 25 Apr 2024 02:10:31 +0000 Subject: [PATCH 013/201] git subrepo push --branch=master barretenberg subrepo: subdir: "barretenberg" merged: "297d96b6e" upstream: origin: "https://github.com/AztecProtocol/barretenberg" branch: "master" commit: "297d96b6e" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- barretenberg/.gitrepo | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 51be7b3baa8..bdb98ab5ee6 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 40fdf900bf687d93bd2eb7e54ee39d336346097a - parent = d8fcfb590f788b911111010e20458797d76f5779 + commit = 297d96b6ea5fc4471d9e39993658e27affe3aecf + parent = 91cc0a424031b9b8346cc9182f303d1468b1179b method = merge cmdver = 0.4.6 From d0622cffa2dfffdf8bd96cc34627a78aeb8a72e5 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 25 Apr 2024 02:10:56 +0000 Subject: [PATCH 014/201] chore: replace relative paths to noir-protocol-circuits --- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- noir-projects/aztec-nr/tests/Nargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 7a1f1af5863..2cbb43ab278 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.35.1", directory="noir-projects/noir-protocol-circuits/crates/types" } diff --git a/noir-projects/aztec-nr/tests/Nargo.toml b/noir-projects/aztec-nr/tests/Nargo.toml index 13404b37324..dfed895aad0 100644 --- a/noir-projects/aztec-nr/tests/Nargo.toml +++ b/noir-projects/aztec-nr/tests/Nargo.toml @@ -6,4 +6,4 @@ type = "lib" [dependencies] aztec = { path = "../aztec" } -protocol_types = { path = "../../noir-protocol-circuits/crates/types" } +protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.35.1", directory="noir-projects/noir-protocol-circuits/crates/types" } From 4880a6d99006d291e0ceb57fdfd320028e9973f2 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 25 Apr 2024 02:10:56 +0000 Subject: [PATCH 015/201] git_subrepo.sh: Fix parent in .gitrepo file. [skip ci] --- noir-projects/aztec-nr/.gitrepo | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index 67d3d4a8106..ae17af02062 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -9,4 +9,4 @@ commit = d4be027d470ca2faf8795043b5c91bbe37990b96 method = merge cmdver = 0.4.6 - parent = 6bd644f98bae8e23c4e0de99edc12f5361c3d160 + parent = 83bbfa04c2b474444f63439e11f7f50b06c4e5bc From aeb4148c3881a1fc333bfdbfa12018beb063202b Mon Sep 17 00:00:00 2001 From: AztecBot Date: Thu, 25 Apr 2024 02:10:59 +0000 Subject: [PATCH 016/201] git subrepo push --branch=master noir-projects/aztec-nr subrepo: subdir: "noir-projects/aztec-nr" merged: "236dda47e" upstream: origin: "https://github.com/AztecProtocol/aztec-nr" branch: "master" commit: "236dda47e" git-subrepo: version: "0.4.6" origin: "???" commit: "???" 
[skip ci] --- noir-projects/aztec-nr/.gitrepo | 4 ++-- noir-projects/aztec-nr/aztec/Nargo.toml | 2 +- noir-projects/aztec-nr/tests/Nargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/noir-projects/aztec-nr/.gitrepo b/noir-projects/aztec-nr/.gitrepo index ae17af02062..5d88076f4cb 100644 --- a/noir-projects/aztec-nr/.gitrepo +++ b/noir-projects/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = d4be027d470ca2faf8795043b5c91bbe37990b96 + commit = 236dda47e529cbac73792606bc29e4b5987759d9 method = merge cmdver = 0.4.6 - parent = 83bbfa04c2b474444f63439e11f7f50b06c4e5bc + parent = a63f0cfafa7a551814f1f3aaef3cd8979fe140ec diff --git a/noir-projects/aztec-nr/aztec/Nargo.toml b/noir-projects/aztec-nr/aztec/Nargo.toml index 2cbb43ab278..7a1f1af5863 100644 --- a/noir-projects/aztec-nr/aztec/Nargo.toml +++ b/noir-projects/aztec-nr/aztec/Nargo.toml @@ -5,4 +5,4 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.35.1", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } diff --git a/noir-projects/aztec-nr/tests/Nargo.toml b/noir-projects/aztec-nr/tests/Nargo.toml index dfed895aad0..13404b37324 100644 --- a/noir-projects/aztec-nr/tests/Nargo.toml +++ b/noir-projects/aztec-nr/tests/Nargo.toml @@ -6,4 +6,4 @@ type = "lib" [dependencies] aztec = { path = "../aztec" } -protocol_types = { git="https://github.com/AztecProtocol/aztec-packages", tag="aztec-packages-v0.35.1", directory="noir-projects/noir-protocol-circuits/crates/types" } +protocol_types = { path = "../../noir-protocol-circuits/crates/types" } From b12f60994fdd54cb4d8e18e444c207e319f9d6a6 Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 24 Apr 2024 23:15:20 -0500 Subject: [PATCH 017/201] chore(ci): prevent haywire logs (#5966) Note: JSON does not allow comments, so the reason this setting is needed could not be documented inline. There is a bad interaction between Earthly, GitHub Actions, and the Jest summary reporter. As far as I can see, it's this combination and not any piece alone (likely related to ANSI codes, but not entirely sure), and this seems like the simplest fix. Without it, logs contained a lot of duplicated lines, which interacted badly with the poor rendering performance of GitHub Actions logs. On top of that, log rendering is just slow in Chrome for 10,000+ lines:
use raw logs, or another browser like safari --- yarn-project/accounts/package.json | 8 ++++++++ yarn-project/archiver/package.json | 8 ++++++++ yarn-project/aztec-faucet/package.json | 8 ++++++++ yarn-project/aztec-node/package.json | 8 ++++++++ yarn-project/aztec.js/package.json | 8 ++++++++ yarn-project/aztec/package.json | 8 ++++++++ yarn-project/builder/package.json | 8 ++++++++ yarn-project/circuit-types/package.json | 8 ++++++++ yarn-project/circuits.js/package.json | 10 +++++++++- yarn-project/end-to-end/jest.integration.config.json | 1 + yarn-project/end-to-end/package.json | 1 + yarn-project/entrypoints/package.json | 8 ++++++++ yarn-project/ethereum/package.json | 8 ++++++++ yarn-project/foundation/package.json | 8 ++++++++ yarn-project/key-store/package.json | 8 ++++++++ yarn-project/kv-store/package.json | 8 ++++++++ yarn-project/merkle-tree/package.json | 8 ++++++++ yarn-project/noir-contracts.js/package.json | 8 ++++++++ yarn-project/noir-protocol-circuits-types/package.json | 10 +++++++++- yarn-project/p2p-bootstrap/package.json | 8 ++++++++ yarn-project/p2p/package.json | 8 ++++++++ yarn-project/package.common.json | 1 + yarn-project/protocol-contracts/package.json | 8 ++++++++ yarn-project/prover-client/package.json | 8 ++++++++ yarn-project/pxe/package.json | 8 ++++++++ yarn-project/scripts/package.json | 8 ++++++++ yarn-project/sequencer-client/package.json | 10 +++++++++- yarn-project/simulator/package.json | 8 ++++++++ yarn-project/types/package.json | 8 ++++++++ yarn-project/world-state/package.json | 8 ++++++++ 30 files changed, 222 insertions(+), 3 deletions(-) diff --git a/yarn-project/accounts/package.json b/yarn-project/accounts/package.json index b78eb273ca5..2b3cde6cec1 100644 --- a/yarn-project/accounts/package.json +++ b/yarn-project/accounts/package.json @@ -50,6 +50,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/archiver/package.json b/yarn-project/archiver/package.json index e1de8022869..1eff7e25ea0 100644 --- a/yarn-project/archiver/package.json +++ b/yarn-project/archiver/package.json @@ -39,6 +39,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/aztec-faucet/package.json b/yarn-project/aztec-faucet/package.json index 28accc5b543..957b2203b87 100644 --- a/yarn-project/aztec-faucet/package.json +++ b/yarn-project/aztec-faucet/package.json @@ -36,6 +36,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/aztec-node/package.json b/yarn-project/aztec-node/package.json index 36f1026b451..43c4c4e7ad5 100644 --- a/yarn-project/aztec-node/package.json +++ b/yarn-project/aztec-node/package.json @@ -37,6 +37,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/aztec.js/package.json b/yarn-project/aztec.js/package.json index d4f1ad2ed96..96748862412 100644 --- a/yarn-project/aztec.js/package.json +++ b/yarn-project/aztec.js/package.json @@ -54,6 +54,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 
5d3874c5a85..d9f79c69f3b 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -78,6 +78,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "engines": { diff --git a/yarn-project/builder/package.json b/yarn-project/builder/package.json index cac014a948c..cb01a3034bc 100644 --- a/yarn-project/builder/package.json +++ b/yarn-project/builder/package.json @@ -46,6 +46,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/circuit-types/package.json b/yarn-project/circuit-types/package.json index d0a245f9f42..8779c385af2 100644 --- a/yarn-project/circuit-types/package.json +++ b/yarn-project/circuit-types/package.json @@ -41,6 +41,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/circuits.js/package.json b/yarn-project/circuits.js/package.json index e3ef5ffd921..07c5759f8a2 100644 --- a/yarn-project/circuits.js/package.json +++ b/yarn-project/circuits.js/package.json @@ -78,6 +78,14 @@ "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" }, "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", - "rootDir": "./src" + "rootDir": "./src", + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ] } } diff --git a/yarn-project/end-to-end/jest.integration.config.json b/yarn-project/end-to-end/jest.integration.config.json index c29166140a6..84d61df320c 100644 --- a/yarn-project/end-to-end/jest.integration.config.json +++ b/yarn-project/end-to-end/jest.integration.config.json @@ -6,6 +6,7 @@ "moduleNameMapper": { "^(\\.{1,2}/.*)\\.js$": "$1" }, + "reporters": [["default", {"summaryThreshold": 9999}]], "testRegex": "./src/.*\\.test\\.ts$", "rootDir": "./src" } diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 3ff8401dd77..524ecfedde8 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -110,6 +110,7 @@ "@swc/jest" ] }, + "reporters": [["default", {"summaryThreshold": 9999}]], "moduleNameMapper": { "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" }, diff --git a/yarn-project/entrypoints/package.json b/yarn-project/entrypoints/package.json index 3ee6b4bbe95..63470f19789 100644 --- a/yarn-project/entrypoints/package.json +++ b/yarn-project/entrypoints/package.json @@ -40,6 +40,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/ethereum/package.json b/yarn-project/ethereum/package.json index f844712e9ef..efd72f2d3da 100644 --- a/yarn-project/ethereum/package.json +++ b/yarn-project/ethereum/package.json @@ -56,6 +56,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "engines": { diff --git a/yarn-project/foundation/package.json b/yarn-project/foundation/package.json index df881ec593c..a4b504dfb1e 100644 --- a/yarn-project/foundation/package.json +++ b/yarn-project/foundation/package.json @@ -67,6 +67,14 @@ "rootDir": "./src", "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/key-store/package.json b/yarn-project/key-store/package.json index 27830c38bae..79ce75204c3 100644 --- 
a/yarn-project/key-store/package.json +++ b/yarn-project/key-store/package.json @@ -34,6 +34,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/kv-store/package.json b/yarn-project/kv-store/package.json index d22d0190c7d..0fcb06fd0e3 100644 --- a/yarn-project/kv-store/package.json +++ b/yarn-project/kv-store/package.json @@ -33,6 +33,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/merkle-tree/package.json b/yarn-project/merkle-tree/package.json index 70f816d62b5..8d19e74c5a5 100644 --- a/yarn-project/merkle-tree/package.json +++ b/yarn-project/merkle-tree/package.json @@ -36,6 +36,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/noir-contracts.js/package.json b/yarn-project/noir-contracts.js/package.json index 811ab20a72b..138a1771247 100644 --- a/yarn-project/noir-contracts.js/package.json +++ b/yarn-project/noir-contracts.js/package.json @@ -34,6 +34,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/noir-protocol-circuits-types/package.json b/yarn-project/noir-protocol-circuits-types/package.json index 01acd5cc549..c8bcbff9171 100644 --- a/yarn-project/noir-protocol-circuits-types/package.json +++ b/yarn-project/noir-protocol-circuits-types/package.json @@ -34,7 +34,15 @@ "^.+\\.tsx?$": [ "@swc/jest" ] - } + }, + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ] }, "dependencies": { "@aztec/builder": "workspace:^", diff --git a/yarn-project/p2p-bootstrap/package.json b/yarn-project/p2p-bootstrap/package.json index c715d62a522..d4a718755ad 100644 --- a/yarn-project/p2p-bootstrap/package.json +++ b/yarn-project/p2p-bootstrap/package.json @@ -56,6 +56,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "engines": { diff --git a/yarn-project/p2p/package.json b/yarn-project/p2p/package.json index 8ba5ec40883..8ec12b13a0e 100644 --- a/yarn-project/p2p/package.json +++ b/yarn-project/p2p/package.json @@ -36,6 +36,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/package.common.json b/yarn-project/package.common.json index 3baad97350e..80b56a8d697 100644 --- a/yarn-project/package.common.json +++ b/yarn-project/package.common.json @@ -24,6 +24,7 @@ "moduleNameMapper": { "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" }, + "reporters": [["default", {"summaryThreshold": 9999}]], "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", "rootDir": "./src" } diff --git a/yarn-project/protocol-contracts/package.json b/yarn-project/protocol-contracts/package.json index d0d254c2dee..d05a83250fd 100644 --- a/yarn-project/protocol-contracts/package.json +++ b/yarn-project/protocol-contracts/package.json @@ -45,6 +45,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 989fd4ff340..132403cbb7f 100644 --- a/yarn-project/prover-client/package.json +++ 
b/yarn-project/prover-client/package.json @@ -39,6 +39,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/pxe/package.json b/yarn-project/pxe/package.json index 03c104943be..a7c36b728b7 100644 --- a/yarn-project/pxe/package.json +++ b/yarn-project/pxe/package.json @@ -37,6 +37,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/scripts/package.json b/yarn-project/scripts/package.json index 94e2988785e..e7a327778e6 100644 --- a/yarn-project/scripts/package.json +++ b/yarn-project/scripts/package.json @@ -64,6 +64,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "engines": { diff --git a/yarn-project/sequencer-client/package.json b/yarn-project/sequencer-client/package.json index 47336f4a507..a8479023370 100644 --- a/yarn-project/sequencer-client/package.json +++ b/yarn-project/sequencer-client/package.json @@ -85,6 +85,14 @@ "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" }, "testRegex": "./src/.*\\.test\\.(js|mjs|ts)$", - "rootDir": "./src" + "rootDir": "./src", + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ] } } diff --git a/yarn-project/simulator/package.json b/yarn-project/simulator/package.json index 1c13ca0a631..b7fd60c5c29 100644 --- a/yarn-project/simulator/package.json +++ b/yarn-project/simulator/package.json @@ -34,6 +34,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/types/package.json b/yarn-project/types/package.json index 9832f31fb76..4abfe1f9a65 100644 --- a/yarn-project/types/package.json +++ b/yarn-project/types/package.json @@ -39,6 +39,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { diff --git a/yarn-project/world-state/package.json b/yarn-project/world-state/package.json index 2cae20bf522..5e3dd632a20 100644 --- a/yarn-project/world-state/package.json +++ b/yarn-project/world-state/package.json @@ -34,6 +34,14 @@ }, "extensionsToTreatAsEsm": [ ".ts" + ], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] ] }, "dependencies": { From 7eb164f28a65426482557cc5dfcb31b9e7c23ab9 Mon Sep 17 00:00:00 2001 From: ludamad Date: Wed, 24 Apr 2024 23:16:54 -0500 Subject: [PATCH 018/201] chore(ci): fix concurrency key (#5962) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6dada291720..ce28bee2c1a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -112,7 +112,7 @@ jobs: - uses: ./.github/ci-setup-action with: dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" - concurrency_key: yarn-project-test-${{ github.actor }}-x86 + concurrency_key: yarn-project-formatting-${{ github.actor }}-x86 - name: "Yarn Project Tests" timeout-minutes: 25 run: earthly --no-output ./yarn-project/+format-check From dafb3edc799b2adaf285ffe57b41630040c68449 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Thu, 25 Apr 2024 10:18:14 +0100 Subject: [PATCH 019/201] refactor: replace queue with facade over CircuitProver (#5972) This PR removes the notion of "proving queue" and keeps it only as an implementation detail for a 
prover --- yarn-project/aztec/tsconfig.json | 6 +- .../circuit-types/src/interfaces/index.ts | 1 + .../src/interfaces/proving-job.ts} | 16 +- yarn-project/end-to-end/package.json | 9 +- .../tsconfig.json | 6 +- .../prover-client/src/mocks/test_context.ts | 10 +- .../src/orchestrator/orchestrator.ts | 251 +++++++++--------- .../orchestrator_failures.test.ts | 11 +- .../src/orchestrator/tx-proving-state.ts | 16 -- .../src/prover-pool/circuit-prover-agent.ts | 107 -------- .../prover-client/src/prover-pool/index.ts | 3 + .../prover-pool/memory-proving-queue.test.ts | 26 +- .../src/prover-pool/memory-proving-queue.ts | 117 +++++++- ...ver-agent.test.ts => prover-agent.test.ts} | 34 +-- .../src/prover-pool/prover-agent.ts | 123 ++++++++- .../src/prover-pool/prover-pool.ts | 27 +- .../src/prover-pool/proving-queue.ts | 23 -- .../prover-client/src/tx-prover/tx-prover.ts | 10 +- yarn-project/pxe/tsconfig.json | 6 +- 19 files changed, 435 insertions(+), 367 deletions(-) rename yarn-project/{prover-client/src/prover-pool/proving-request.ts => circuit-types/src/interfaces/proving-job.ts} (82%) delete mode 100644 yarn-project/prover-client/src/prover-pool/circuit-prover-agent.ts create mode 100644 yarn-project/prover-client/src/prover-pool/index.ts rename yarn-project/prover-client/src/prover-pool/{circuit-prover-agent.test.ts => prover-agent.test.ts} (70%) delete mode 100644 yarn-project/prover-client/src/prover-pool/proving-queue.ts diff --git a/yarn-project/aztec/tsconfig.json b/yarn-project/aztec/tsconfig.json index ff9caf3d51b..457e0776787 100644 --- a/yarn-project/aztec/tsconfig.json +++ b/yarn-project/aztec/tsconfig.json @@ -18,6 +18,9 @@ { "path": "../aztec.js" }, + { + "path": "../builder" + }, { "path": "../circuit-types" }, @@ -39,9 +42,6 @@ { "path": "../l1-artifacts" }, - { - "path": "../builder" - }, { "path": "../noir-contracts.js" }, diff --git a/yarn-project/circuit-types/src/interfaces/index.ts b/yarn-project/circuit-types/src/interfaces/index.ts index 25d6f63bd82..71bfeeda4a2 100644 --- a/yarn-project/circuit-types/src/interfaces/index.ts +++ b/yarn-project/circuit-types/src/interfaces/index.ts @@ -6,4 +6,5 @@ export * from './configs.js'; export * from './nullifier_tree.js'; export * from './public_data_tree.js'; export * from './prover-client.js'; +export * from './proving-job.js'; export * from './block-prover.js'; diff --git a/yarn-project/prover-client/src/prover-pool/proving-request.ts b/yarn-project/circuit-types/src/interfaces/proving-job.ts similarity index 82% rename from yarn-project/prover-client/src/prover-pool/proving-request.ts rename to yarn-project/circuit-types/src/interfaces/proving-job.ts index ef98e7e18cd..503de6bd820 100644 --- a/yarn-project/prover-client/src/prover-pool/proving-request.ts +++ b/yarn-project/circuit-types/src/interfaces/proving-job.ts @@ -1,4 +1,3 @@ -import { type PublicKernelNonTailRequest, type PublicKernelTailRequest } from '@aztec/circuit-types'; import { type BaseOrMergeRollupPublicInputs, type BaseParityInputs, @@ -13,6 +12,13 @@ import { type RootRollupPublicInputs, } from '@aztec/circuits.js'; +import type { PublicKernelNonTailRequest, PublicKernelTailRequest } from '../tx/processed_tx.js'; + +export type ProvingJob = { + id: string; + request: T; +}; + export enum ProvingRequestType { PUBLIC_VM, @@ -79,3 +85,11 @@ export type ProvingRequestPublicInputs = { }; export type ProvingRequestResult = [ProvingRequestPublicInputs[T], Proof]; + +export interface ProvingJobSource { + getProvingJob(): Promise | null>; + + 
resolveProvingJob(jobId: string, result: ProvingRequestResult): Promise; + + rejectProvingJob(jobId: string, reason: Error): Promise; +} diff --git a/yarn-project/end-to-end/package.json b/yarn-project/end-to-end/package.json index 524ecfedde8..e5e5c0ba6ba 100644 --- a/yarn-project/end-to-end/package.json +++ b/yarn-project/end-to-end/package.json @@ -110,7 +110,14 @@ "@swc/jest" ] }, - "reporters": [["default", {"summaryThreshold": 9999}]], + "reporters": [ + [ + "default", + { + "summaryThreshold": 9999 + } + ] + ], "moduleNameMapper": { "^(\\.{1,2}/.*)\\.[cm]?js$": "$1" }, diff --git a/yarn-project/noir-protocol-circuits-types/tsconfig.json b/yarn-project/noir-protocol-circuits-types/tsconfig.json index b932dd11a80..632b9eed778 100644 --- a/yarn-project/noir-protocol-circuits-types/tsconfig.json +++ b/yarn-project/noir-protocol-circuits-types/tsconfig.json @@ -7,13 +7,13 @@ }, "references": [ { - "path": "../circuits.js" + "path": "../builder" }, { - "path": "../foundation" + "path": "../circuits.js" }, { - "path": "../builder" + "path": "../foundation" }, { "path": "../types" diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index 0fe75860cc0..75abb24ada6 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -21,7 +21,8 @@ import * as fs from 'fs/promises'; import { type MockProxy, mock } from 'jest-mock-extended'; import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; -import { CircuitProverAgent } from '../prover-pool/circuit-prover-agent.js'; +import { MemoryProvingQueue } from '../prover-pool/memory-proving-queue.js'; +import { ProverAgent } from '../prover-pool/prover-agent.js'; import { ProverPool } from '../prover-pool/prover-pool.js'; import { type BBProverConfig } from '../prover/bb_prover.js'; import { type CircuitProver } from '../prover/interface.js'; @@ -87,10 +88,11 @@ export class TestContext { localProver = await createProver(bbConfig); } - const proverPool = new ProverPool(proverCount, i => new CircuitProverAgent(localProver, 10, `${i}`)); - const orchestrator = new ProvingOrchestrator(actualDb, proverPool.queue); + const queue = new MemoryProvingQueue(); + const proverPool = new ProverPool(proverCount, i => new ProverAgent(localProver, 10, `${i}`)); + const orchestrator = new ProvingOrchestrator(actualDb, queue); - await proverPool.start(); + await proverPool.start(queue); return new this( publicExecutor, diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index 743d1319d32..84e5554361d 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -3,6 +3,7 @@ import { L2Block, MerkleTreeId, type ProcessedTx, + type PublicKernelRequest, PublicKernelType, type TxEffect, toTxEffect, @@ -20,11 +21,13 @@ import { type BaseRollupInputs, Fr, type GlobalVariables, + type KernelCircuitPublicInputs, L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, NUM_BASE_PARITY_PER_ROOT_PARITY, type Proof, + type PublicKernelCircuitPublicInputs, RootParityInput, RootParityInputs, makeEmptyProof, @@ -34,17 +37,13 @@ import { padArrayEnd } from '@aztec/foundation/collection'; import { createDebugLogger } from '@aztec/foundation/log'; import { promiseWithResolvers } from '@aztec/foundation/promise'; import { type Tuple 
} from '@aztec/foundation/serialize'; +import { sleep } from '@aztec/foundation/sleep'; import { Timer } from '@aztec/foundation/timer'; import { type MerkleTreeOperations } from '@aztec/world-state'; import { inspect } from 'util'; -import { type ProvingQueue } from '../prover-pool/proving-queue.js'; -import { - type ProvingRequest, - type ProvingRequestPublicInputs, - ProvingRequestType, -} from '../prover-pool/proving-request.js'; +import { type CircuitProver } from '../prover/interface.js'; import { buildBaseRollupInput, createMergeRollupInputs, @@ -81,7 +80,7 @@ const KernelTypesWithoutFunctions: Set = new Set( + private deferredProving( provingState: ProvingState | undefined, - request: T, - callback: (output: ProvingRequestPublicInputs[T['type']], proof: Proof) => void | Promise, + request: () => Promise, + callback: (result: T, durationMs: number) => void | Promise, ) { if (!provingState?.verifyState()) { - logger.debug(`Not enqueuing job type ${ProvingRequestType[request.type]}, state no longer valid`); + logger.debug(`Not enqueuing job, state no longer valid`); return; } // We use a 'safeJob'. We don't want promise rejections in the proving pool, we want to capture the error here @@ -315,32 +313,17 @@ export class ProvingOrchestrator { const safeJob = async () => { try { const timer = new Timer(); - const [publicInputs, proof] = await this.queue.prove(request); + const result = await request(); const duration = timer.ms(); - const inputSize = 'toBuffer' in request.inputs ? request.inputs.toBuffer().length : 0; - const outputSize = 'toBuffer' in publicInputs ? publicInputs.toBuffer().length : 0; - const circuitName = this.getCircuitNameFromRequest(request); - const stats: CircuitSimulationStats | undefined = circuitName - ? { - eventName: 'circuit-simulation', - circuitName, - duration, - inputSize, - outputSize, - } - : undefined; - - logger.debug(`Simulated ${ProvingRequestType[request.type]} circuit duration=${duration}ms`, stats); - if (!provingState?.verifyState()) { - logger.debug(`State no longer valid, discarding result of job type ${ProvingRequestType[request.type]}`); + logger.debug(`State no longer valid, discarding result`); return; } - await callback(publicInputs, proof); + await callback(result, duration); } catch (err) { - logger.error(`Error thrown when proving job type ${ProvingRequestType[request.type]}: ${err}`); + logger.error(`Error thrown when proving job`); provingState!.reject(`${err}`); } }; @@ -349,38 +332,34 @@ export class ProvingOrchestrator { setImmediate(safeJob); } - private getCircuitNameFromRequest(request: ProvingRequest): CircuitSimulationStats['circuitName'] | null { - switch (request.type) { - case ProvingRequestType.PUBLIC_VM: - return null; - case ProvingRequestType.PUBLIC_KERNEL_NON_TAIL: - switch (request.kernelType) { - case PublicKernelType.SETUP: - return 'public-kernel-setup'; - case PublicKernelType.APP_LOGIC: - return 'public-kernel-app-logic'; - case PublicKernelType.TEARDOWN: - return 'public-kernel-teardown'; - default: - return null; - } - case ProvingRequestType.PUBLIC_KERNEL_TAIL: - switch (request.kernelType) { - case PublicKernelType.TAIL: - return 'public-kernel-tail'; - default: - return null; + private emitCircuitSimulationStats( + circuitName: CircuitSimulationStats['circuitName'] | null, + inputSize: number, + outputSize: number, + duration: number, + ) { + const stats: CircuitSimulationStats | undefined = circuitName + ? 
{ + eventName: 'circuit-simulation', + circuitName, + duration, + inputSize, + outputSize, } - case ProvingRequestType.BASE_ROLLUP: - return 'base-rollup'; - case ProvingRequestType.MERGE_ROLLUP: - return 'merge-rollup'; - case ProvingRequestType.ROOT_ROLLUP: - return 'root-rollup'; - case ProvingRequestType.BASE_PARITY: - return 'base-parity'; - case ProvingRequestType.ROOT_PARITY: - return 'root-parity'; + : undefined; + logger.debug(`Simulated ${circuitName} circuit duration=${duration}ms`, stats); + } + + private getPublicKernelCircuitName(request: PublicKernelRequest) { + switch (request.type) { + case PublicKernelType.SETUP: + return 'public-kernel-setup'; + case PublicKernelType.APP_LOGIC: + return 'public-kernel-app-logic'; + case PublicKernelType.TEARDOWN: + return 'public-kernel-teardown'; + case PublicKernelType.TAIL: + return 'public-kernel-tail'; default: return null; } @@ -458,13 +437,17 @@ export class ProvingOrchestrator { return; } - this.enqueueJob( + this.deferredProving( provingState, - { - inputs: tx.baseRollupInputs, - type: ProvingRequestType.BASE_ROLLUP, - }, - (publicInputs, proof) => { + () => this.prover.getBaseRollupProof(tx.baseRollupInputs), + ([publicInputs, proof], duration) => { + this.emitCircuitSimulationStats( + 'base-rollup', + tx.baseRollupInputs.toBuffer().length, + publicInputs.toBuffer().length, + duration, + ); + validatePartialState(publicInputs.end, tx.treeSnapshots); const currentLevel = provingState.numMergeLevels + 1n; this.storeAndExecuteNextMergeLevel(provingState, currentLevel, index, [publicInputs, proof]); @@ -485,13 +468,16 @@ export class ProvingOrchestrator { [mergeInputData.inputs[1]!, mergeInputData.proofs[1]!], ); - this.enqueueJob( + this.deferredProving( provingState, - { - type: ProvingRequestType.MERGE_ROLLUP, - inputs, - }, - (publicInputs, proof) => { + () => this.prover.getMergeRollupProof(inputs), + ([publicInputs, proof], duration) => { + this.emitCircuitSimulationStats( + 'merge-rollup', + inputs.toBuffer().length, + publicInputs.toBuffer().length, + duration, + ); this.storeAndExecuteNextMergeLevel(provingState, level, index, [publicInputs, proof]); }, ); @@ -518,13 +504,17 @@ export class ProvingOrchestrator { this.db, ); - this.enqueueJob( + this.deferredProving( provingState, - { - type: ProvingRequestType.ROOT_ROLLUP, - inputs, - }, - (publicInputs, proof) => { + () => this.prover.getRootRollupProof(inputs), + ([publicInputs, proof], duration) => { + this.emitCircuitSimulationStats( + 'root-rollup', + inputs.toBuffer().length, + publicInputs.toBuffer().length, + duration, + ); + provingState.rootRollupPublicInputs = publicInputs; provingState.finalProof = proof; @@ -539,13 +529,16 @@ export class ProvingOrchestrator { // Executes the base parity circuit and stores the intermediate state for the root parity circuit // Enqueues the root parity circuit if all inputs are available private enqueueBaseParityCircuit(provingState: ProvingState, inputs: BaseParityInputs, index: number) { - this.enqueueJob( + this.deferredProving( provingState, - { - inputs, - type: ProvingRequestType.BASE_PARITY, - }, - (publicInputs, proof) => { + () => this.prover.getBaseParityProof(inputs), + ([publicInputs, proof], duration) => { + this.emitCircuitSimulationStats( + 'base-parity', + inputs.toBuffer().length, + publicInputs.toBuffer().length, + duration, + ); const rootInput = new RootParityInput(proof, publicInputs); provingState.setRootParityInputs(rootInput, index); const rootParityInputs = new RootParityInputs( @@ -559,13 +552,16 @@ 
export class ProvingOrchestrator { // Runs the root parity circuit ans stored the outputs // Enqueues the root rollup proof if all inputs are available private enqueueRootParityCircuit(provingState: ProvingState | undefined, inputs: RootParityInputs) { - this.enqueueJob( + this.deferredProving( provingState, - { - type: ProvingRequestType.ROOT_PARITY, - inputs, - }, - async (publicInputs, proof) => { + () => this.prover.getRootParityProof(inputs), + async ([publicInputs, proof], duration) => { + this.emitCircuitSimulationStats( + 'root-parity', + inputs.toBuffer().length, + publicInputs.toBuffer().length, + duration, + ); const rootInput = new RootParityInput(proof, publicInputs); provingState!.finalRootParityInput = rootInput; await this.checkAndEnqueueRootRollup(provingState); @@ -629,13 +625,10 @@ export class ProvingOrchestrator { // Prove the VM if this is a kernel that requires one if (!KernelTypesWithoutFunctions.has(publicFunction.publicKernelRequest.type)) { // Just sleep for a small amount of time - this.enqueueJob( + this.deferredProving( provingState, - { - type: ProvingRequestType.PUBLIC_VM, - inputs: {}, - }, - (_1, _2) => { + () => sleep(100), + () => { logger.debug(`Proven VM for function index ${functionIndex} of tx index ${txIndex}`); this.checkAndEnqueuePublicKernel(provingState, txIndex, functionIndex); }, @@ -672,30 +665,46 @@ export class ProvingOrchestrator { } const txProvingState = provingState.getTxProvingState(txIndex); - const provingRequest = txProvingState.getPublicFunctionState(functionIndex).provingRequest; - - this.enqueueJob(provingState, provingRequest, (_, proof) => { - logger.debug(`Proven ${PublicKernelType[provingRequest.type]} at index ${functionIndex} for tx index ${txIndex}`); - const nextKernelRequest = txProvingState.getNextPublicKernelFromKernelProof(functionIndex, proof); - // What's the status of the next kernel? - if (nextKernelRequest.code === TX_PROVING_CODE.NOT_READY) { - // Must be waiting on a VM proof - return; - } + const request = txProvingState.getPublicFunctionState(functionIndex).publicKernelRequest; - if (nextKernelRequest.code === TX_PROVING_CODE.COMPLETED) { - // We must have completed all public function proving, we now move to the base rollup - logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`); - this.enqueueBaseRollup(provingState, BigInt(txIndex), txProvingState); - return; - } - // There must be another kernel ready to be proven - if (nextKernelRequest.function === undefined) { - // Should not be possible - throw new Error(`Error occurred, public function request undefined after kernel proof completed`); - } + this.deferredProving( + provingState, + (): Promise<[KernelCircuitPublicInputs | PublicKernelCircuitPublicInputs, Proof]> => { + if (request.type === PublicKernelType.TAIL) { + return this.prover.getPublicTailProof(request); + } else { + return this.prover.getPublicKernelProof(request); + } + }, + ([_, proof], duration) => { + this.emitCircuitSimulationStats( + this.getPublicKernelCircuitName(request), + request.inputs.toBuffer().length, + 0, + duration, + ); - this.enqueuePublicKernel(provingState, txIndex, functionIndex + 1); - }); + const nextKernelRequest = txProvingState.getNextPublicKernelFromKernelProof(functionIndex, proof); + // What's the status of the next kernel? 
+ if (nextKernelRequest.code === TX_PROVING_CODE.NOT_READY) { + // Must be waiting on a VM proof + return; + } + + if (nextKernelRequest.code === TX_PROVING_CODE.COMPLETED) { + // We must have completed all public function proving, we now move to the base rollup + logger.debug(`Public functions completed for tx ${txIndex} enqueueing base rollup`); + this.enqueueBaseRollup(provingState, BigInt(txIndex), txProvingState); + return; + } + // There must be another kernel ready to be proven + if (nextKernelRequest.function === undefined) { + // Should not be possible + throw new Error(`Error occurred, public function request undefined after kernel proof completed`); + } + + this.enqueuePublicKernel(provingState, txIndex, functionIndex + 1); + }, + ); } } diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts index d8b7f1e6c69..5b06d8418fe 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator_failures.test.ts @@ -6,7 +6,8 @@ import { jest } from '@jest/globals'; import { makeEmptyProcessedTestTx } from '../mocks/fixtures.js'; import { TestContext } from '../mocks/test_context.js'; -import { CircuitProverAgent } from '../prover-pool/circuit-prover-agent.js'; +import { MemoryProvingQueue } from '../prover-pool/memory-proving-queue.js'; +import { ProverAgent } from '../prover-pool/prover-agent.js'; import { ProverPool } from '../prover-pool/prover-pool.js'; import { type CircuitProver } from '../prover/index.js'; import { TestCircuitProver } from '../prover/test_circuit_prover.js'; @@ -29,12 +30,14 @@ describe('prover/orchestrator/failures', () => { describe('error handling', () => { let mockProver: CircuitProver; + let queue: MemoryProvingQueue; beforeEach(async () => { mockProver = new TestCircuitProver(new WASMSimulator()); - proverPool = new ProverPool(1, i => new CircuitProverAgent(mockProver, 10, `${i}`)); - orchestrator = new ProvingOrchestrator(context.actualDb, proverPool.queue); - await proverPool.start(); + proverPool = new ProverPool(1, i => new ProverAgent(mockProver, 10, `${i}`)); + queue = new MemoryProvingQueue(); + orchestrator = new ProvingOrchestrator(context.actualDb, queue); + await proverPool.start(queue); }); afterEach(async () => { diff --git a/yarn-project/prover-client/src/orchestrator/tx-proving-state.ts b/yarn-project/prover-client/src/orchestrator/tx-proving-state.ts index 73339d00d6d..ab8802d2fcc 100644 --- a/yarn-project/prover-client/src/orchestrator/tx-proving-state.ts +++ b/yarn-project/prover-client/src/orchestrator/tx-proving-state.ts @@ -1,8 +1,6 @@ import { type MerkleTreeId, type ProcessedTx, type PublicKernelRequest, PublicKernelType } from '@aztec/circuit-types'; import { type AppendOnlyTreeSnapshot, type BaseRollupInputs, type Proof } from '@aztec/circuits.js'; -import { type ProvingRequest, ProvingRequestType } from '../prover-pool/proving-request.js'; - export enum TX_PROVING_CODE { NOT_READY, READY, @@ -14,7 +12,6 @@ export type PublicFunction = { previousProofType: PublicKernelType; previousKernelProof: Proof | undefined; publicKernelRequest: PublicKernelRequest; - provingRequest: ProvingRequest; }; // Type encapsulating the instruction to the orchestrator as to what @@ -40,24 +37,11 @@ export class TxProvingState { let previousKernelProof: Proof | undefined = processedTx.proof; let previousProofType = PublicKernelType.NON_PUBLIC; for (const 
kernelRequest of processedTx.publicKernelRequests) { - const provingRequest: ProvingRequest = - kernelRequest.type === PublicKernelType.TAIL - ? { - type: ProvingRequestType.PUBLIC_KERNEL_TAIL, - kernelType: kernelRequest.type, - inputs: kernelRequest.inputs, - } - : { - type: ProvingRequestType.PUBLIC_KERNEL_NON_TAIL, - kernelType: kernelRequest.type, - inputs: kernelRequest.inputs, - }; const publicFunction: PublicFunction = { vmProof: undefined, previousProofType, previousKernelProof, publicKernelRequest: kernelRequest, - provingRequest, }; this.publicFunctions.push(publicFunction); previousKernelProof = undefined; diff --git a/yarn-project/prover-client/src/prover-pool/circuit-prover-agent.ts b/yarn-project/prover-client/src/prover-pool/circuit-prover-agent.ts deleted file mode 100644 index 76553512eb2..00000000000 --- a/yarn-project/prover-client/src/prover-pool/circuit-prover-agent.ts +++ /dev/null @@ -1,107 +0,0 @@ -import { makeEmptyProof } from '@aztec/circuits.js'; -import { createDebugLogger } from '@aztec/foundation/log'; -import { RunningPromise } from '@aztec/foundation/running-promise'; -import { elapsed } from '@aztec/foundation/timer'; - -import { type CircuitProver } from '../prover/interface.js'; -import { type ProvingAgent } from './prover-agent.js'; -import { type ProvingQueueConsumer } from './proving-queue.js'; -import { type ProvingRequest, type ProvingRequestResult, ProvingRequestType } from './proving-request.js'; - -export class CircuitProverAgent implements ProvingAgent { - private runningPromise?: RunningPromise; - - constructor( - /** The prover implementation to defer jobs to */ - private prover: CircuitProver, - /** How long to wait between jobs */ - private intervalMs = 10, - /** A name for this agent (if there are multiple agents running) */ - name = '', - private log = createDebugLogger('aztec:prover-client:prover-pool:agent' + (name ? 
`:${name}` : '')), - ) {} - - start(queue: ProvingQueueConsumer): void { - if (this.runningPromise) { - throw new Error('Agent is already running'); - } - - this.runningPromise = new RunningPromise(async () => { - const job = await queue.getProvingJob(); - if (!job) { - return; - } - - try { - const [time, result] = await elapsed(() => this.work(job.request)); - await queue.resolveProvingJob(job.id, result); - this.log.info( - `Processed proving job id=${job.id} type=${ProvingRequestType[job.request.type]} duration=${time}ms`, - ); - } catch (err) { - this.log.error( - `Error processing proving job id=${job.id} type=${ProvingRequestType[job.request.type]}: ${err}`, - ); - await queue.rejectProvingJob(job.id, err as Error); - } - }, this.intervalMs); - - this.runningPromise.start(); - } - - async stop(): Promise { - if (!this.runningPromise) { - throw new Error('Agent is not running'); - } - - await this.runningPromise.stop(); - this.runningPromise = undefined; - } - - private work(request: ProvingRequest): Promise> { - const { type, inputs } = request; - switch (type) { - case ProvingRequestType.PUBLIC_VM: { - return Promise.resolve([{}, makeEmptyProof()] as const); - } - - case ProvingRequestType.PUBLIC_KERNEL_NON_TAIL: { - return this.prover.getPublicKernelProof({ - type: request.kernelType, - inputs, - }); - } - - case ProvingRequestType.PUBLIC_KERNEL_TAIL: { - return this.prover.getPublicTailProof({ - type: request.kernelType, - inputs, - }); - } - - case ProvingRequestType.BASE_ROLLUP: { - return this.prover.getBaseRollupProof(inputs); - } - - case ProvingRequestType.MERGE_ROLLUP: { - return this.prover.getMergeRollupProof(inputs); - } - - case ProvingRequestType.ROOT_ROLLUP: { - return this.prover.getRootRollupProof(inputs); - } - - case ProvingRequestType.BASE_PARITY: { - return this.prover.getBaseParityProof(inputs); - } - - case ProvingRequestType.ROOT_PARITY: { - return this.prover.getRootParityProof(inputs); - } - - default: { - return Promise.reject(new Error(`Invalid proof request type: ${type}`)); - } - } - } -} diff --git a/yarn-project/prover-client/src/prover-pool/index.ts b/yarn-project/prover-client/src/prover-pool/index.ts new file mode 100644 index 00000000000..eaae01068bc --- /dev/null +++ b/yarn-project/prover-client/src/prover-pool/index.ts @@ -0,0 +1,3 @@ +export * from './prover-agent.js'; +export * from './memory-proving-queue.js'; +export * from './prover-pool.js'; diff --git a/yarn-project/prover-client/src/prover-pool/memory-proving-queue.test.ts b/yarn-project/prover-client/src/prover-pool/memory-proving-queue.test.ts index cea156acfc6..57c730afa95 100644 --- a/yarn-project/prover-client/src/prover-pool/memory-proving-queue.test.ts +++ b/yarn-project/prover-client/src/prover-pool/memory-proving-queue.test.ts @@ -1,3 +1,4 @@ +import { ProvingRequestType } from '@aztec/circuit-types'; import { makeBaseParityInputs, makeBaseRollupInputs, @@ -6,26 +7,17 @@ import { } from '@aztec/circuits.js/testing'; import { MemoryProvingQueue } from './memory-proving-queue.js'; -import { type ProvingQueue } from './proving-queue.js'; -import { ProvingRequestType } from './proving-request.js'; describe('MemoryProvingQueue', () => { - let queue: ProvingQueue; + let queue: MemoryProvingQueue; beforeEach(() => { queue = new MemoryProvingQueue(); }); it('returns jobs in order', async () => { - void queue.prove({ - type: ProvingRequestType.BASE_PARITY, - inputs: makeBaseParityInputs(), - }); - - void queue.prove({ - type: ProvingRequestType.BASE_ROLLUP, - inputs: 
makeBaseRollupInputs(), - }); + void queue.getBaseParityProof(makeBaseParityInputs()); + void queue.getBaseRollupProof(makeBaseRollupInputs()); const job1 = await queue.getProvingJob(); expect(job1?.request.type).toEqual(ProvingRequestType.BASE_PARITY); @@ -40,10 +32,7 @@ describe('MemoryProvingQueue', () => { it('notifies of completion', async () => { const inputs = makeBaseParityInputs(); - const promise = queue.prove({ - inputs, - type: ProvingRequestType.BASE_PARITY, - }); + const promise = queue.getBaseParityProof(inputs); const job = await queue.getProvingJob(); expect(job?.request.inputs).toEqual(inputs); @@ -56,10 +45,7 @@ describe('MemoryProvingQueue', () => { it('notifies of errors', async () => { const inputs = makeBaseParityInputs(); - const promise = queue.prove({ - inputs, - type: ProvingRequestType.BASE_PARITY, - }); + const promise = queue.getBaseParityProof(inputs); const job = await queue.getProvingJob(); expect(job?.request.inputs).toEqual(inputs); diff --git a/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts b/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts index 155b548ff2e..ce8fd7f2057 100644 --- a/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts +++ b/yarn-project/prover-client/src/prover-pool/memory-proving-queue.ts @@ -1,17 +1,38 @@ +import { + type ProvingJob, + type ProvingJobSource, + type ProvingRequest, + type ProvingRequestResult, + ProvingRequestType, + type PublicKernelNonTailRequest, + type PublicKernelTailRequest, +} from '@aztec/circuit-types'; +import type { + BaseOrMergeRollupPublicInputs, + BaseParityInputs, + BaseRollupInputs, + KernelCircuitPublicInputs, + MergeRollupInputs, + ParityPublicInputs, + Proof, + PublicKernelCircuitPublicInputs, + RootParityInputs, + RootRollupInputs, + RootRollupPublicInputs, +} from '@aztec/circuits.js'; import { TimeoutError } from '@aztec/foundation/error'; import { MemoryFifo } from '@aztec/foundation/fifo'; import { createDebugLogger } from '@aztec/foundation/log'; import { type PromiseWithResolvers, promiseWithResolvers } from '@aztec/foundation/promise'; -import { type ProvingJob, type ProvingQueue } from './proving-queue.js'; -import { type ProvingRequest, type ProvingRequestResult, ProvingRequestType } from './proving-request.js'; +import { type CircuitProver } from '../prover/interface.js'; type ProvingJobWithResolvers = { id: string; request: T; } & PromiseWithResolvers>; -export class MemoryProvingQueue implements ProvingQueue { +export class MemoryProvingQueue implements CircuitProver, ProvingJobSource { private jobId = 0; private log = createDebugLogger('aztec:prover-client:prover-pool:queue'); private queue = new MemoryFifo(); @@ -60,7 +81,7 @@ export class MemoryProvingQueue implements ProvingQueue { return Promise.resolve(); } - prove(request: T): Promise> { + private enqueue(request: T): Promise> { const { promise, resolve, reject } = promiseWithResolvers>(); const item: ProvingJobWithResolvers = { id: String(this.jobId++), @@ -79,8 +100,90 @@ export class MemoryProvingQueue implements ProvingQueue { return promise; } - cancelAll(): void { - this.queue.cancel(); - this.queue = new MemoryFifo(); + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + */ + getBaseParityProof(inputs: BaseParityInputs): Promise<[ParityPublicInputs, Proof]> { + return this.enqueue({ + type: ProvingRequestType.BASE_PARITY, + inputs, + }); + } + + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. 
+ */ + getRootParityProof(inputs: RootParityInputs): Promise<[ParityPublicInputs, Proof]> { + return this.enqueue({ + type: ProvingRequestType.ROOT_PARITY, + inputs, + }); + } + + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + */ + getBaseRollupProof(input: BaseRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { + return this.enqueue({ + type: ProvingRequestType.BASE_ROLLUP, + inputs: input, + }); + } + + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + */ + getMergeRollupProof(input: MergeRollupInputs): Promise<[BaseOrMergeRollupPublicInputs, Proof]> { + return this.enqueue({ + type: ProvingRequestType.MERGE_ROLLUP, + inputs: input, + }); + } + + /** + * Creates a proof for the given input. + * @param input - Input to the circuit. + */ + getRootRollupProof(input: RootRollupInputs): Promise<[RootRollupPublicInputs, Proof]> { + return this.enqueue({ + type: ProvingRequestType.ROOT_ROLLUP, + inputs: input, + }); + } + + /** + * Create a public kernel proof. + * @param kernelRequest - Object containing the details of the proof required + */ + getPublicKernelProof(kernelRequest: PublicKernelNonTailRequest): Promise<[PublicKernelCircuitPublicInputs, Proof]> { + return this.enqueue({ + type: ProvingRequestType.PUBLIC_KERNEL_NON_TAIL, + kernelType: kernelRequest.type, + inputs: kernelRequest.inputs, + }); + } + + /** + * Create a public kernel tail proof. + * @param kernelRequest - Object containing the details of the proof required + */ + getPublicTailProof(kernelRequest: PublicKernelTailRequest): Promise<[KernelCircuitPublicInputs, Proof]> { + return this.enqueue({ + type: ProvingRequestType.PUBLIC_KERNEL_TAIL, + kernelType: kernelRequest.type, + inputs: kernelRequest.inputs, + }); + } + + /** + * Verifies a circuit proof + */ + verifyProof(): Promise { + // no-op + return Promise.resolve(); } } diff --git a/yarn-project/prover-client/src/prover-pool/circuit-prover-agent.test.ts b/yarn-project/prover-client/src/prover-pool/prover-agent.test.ts similarity index 70% rename from yarn-project/prover-client/src/prover-pool/circuit-prover-agent.test.ts rename to yarn-project/prover-client/src/prover-pool/prover-agent.test.ts index a5e8b224cc9..7c9bab82d9c 100644 --- a/yarn-project/prover-client/src/prover-pool/circuit-prover-agent.test.ts +++ b/yarn-project/prover-client/src/prover-pool/prover-agent.test.ts @@ -3,21 +3,18 @@ import { makeBaseParityInputs, makeParityPublicInputs, makeProof } from '@aztec/ import { type MockProxy, mock } from 'jest-mock-extended'; import { type CircuitProver } from '../prover/interface.js'; -import { CircuitProverAgent } from './circuit-prover-agent.js'; import { MemoryProvingQueue } from './memory-proving-queue.js'; -import { type ProvingAgent } from './prover-agent.js'; -import { type ProvingQueue } from './proving-queue.js'; -import { ProvingRequestType } from './proving-request.js'; +import { ProverAgent } from './prover-agent.js'; -describe('LocalProvingAgent', () => { - let queue: ProvingQueue; - let agent: ProvingAgent; +describe('ProverAgent', () => { + let queue: MemoryProvingQueue; + let agent: ProverAgent; let prover: MockProxy; beforeEach(() => { prover = mock(); queue = new MemoryProvingQueue(); - agent = new CircuitProverAgent(prover); + agent = new ProverAgent(prover); }); beforeEach(() => { @@ -34,11 +31,7 @@ describe('LocalProvingAgent', () => { prover.getBaseParityProof.mockResolvedValue([publicInputs, proof]); const inputs = makeBaseParityInputs(); - const 
promise = queue.prove({ - type: ProvingRequestType.BASE_PARITY, - inputs, - }); - + const promise = queue.getBaseParityProof(inputs); await expect(promise).resolves.toEqual([publicInputs, proof]); expect(prover.getBaseParityProof).toHaveBeenCalledWith(inputs); }); @@ -48,10 +41,7 @@ describe('LocalProvingAgent', () => { prover.getBaseParityProof.mockRejectedValue(error); const inputs = makeBaseParityInputs(); - const promise = queue.prove({ - type: ProvingRequestType.BASE_PARITY, - inputs, - }); + const promise = queue.getBaseParityProof(inputs); await expect(promise).rejects.toEqual(error); expect(prover.getBaseParityProof).toHaveBeenCalledWith(inputs); @@ -63,18 +53,12 @@ describe('LocalProvingAgent', () => { prover.getBaseParityProof.mockResolvedValue([publicInputs, proof]); const inputs = makeBaseParityInputs(); - const promise1 = queue.prove({ - type: ProvingRequestType.BASE_PARITY, - inputs, - }); + const promise1 = queue.getBaseParityProof(inputs); await expect(promise1).resolves.toEqual([publicInputs, proof]); const inputs2 = makeBaseParityInputs(); - const promise2 = queue.prove({ - type: ProvingRequestType.BASE_PARITY, - inputs: inputs2, - }); + const promise2 = queue.getBaseParityProof(inputs2); await expect(promise2).resolves.toEqual([publicInputs, proof]); diff --git a/yarn-project/prover-client/src/prover-pool/prover-agent.ts b/yarn-project/prover-client/src/prover-pool/prover-agent.ts index 6d408e3a074..030bcc35f90 100644 --- a/yarn-project/prover-client/src/prover-pool/prover-agent.ts +++ b/yarn-project/prover-client/src/prover-pool/prover-agent.ts @@ -1,15 +1,110 @@ -import { type ProvingQueueConsumer } from './proving-queue.js'; - -/** An agent that reads proving jobs from the queue, creates the proof and submits back the result */ -export interface ProvingAgent { - /** - * Starts the agent to read proving jobs from the queue. - * @param queue - The queue to read proving jobs from. - */ - start(queue: ProvingQueueConsumer): void; - - /** - * Stops the agent. Does nothing if the agent is not running. - */ - stop(): Promise; +import { + type ProvingJobSource, + type ProvingRequest, + type ProvingRequestResult, + ProvingRequestType, +} from '@aztec/circuit-types'; +import { makeEmptyProof } from '@aztec/circuits.js'; +import { createDebugLogger } from '@aztec/foundation/log'; +import { RunningPromise } from '@aztec/foundation/running-promise'; +import { elapsed } from '@aztec/foundation/timer'; + +import { type CircuitProver } from '../prover/interface.js'; + +export class ProverAgent { + private runningPromise?: RunningPromise; + + constructor( + /** The prover implementation to defer jobs to */ + private prover: CircuitProver, + /** How long to wait between jobs */ + private intervalMs = 10, + /** A name for this agent (if there are multiple agents running) */ + name = '', + private log = createDebugLogger('aztec:prover-client:prover-pool:agent' + (name ? 
`:${name}` : '')), + ) {} + + start(queue: ProvingJobSource): void { + if (this.runningPromise) { + throw new Error('Agent is already running'); + } + + this.runningPromise = new RunningPromise(async () => { + const job = await queue.getProvingJob(); + if (!job) { + return; + } + + try { + const [time, result] = await elapsed(() => this.work(job.request)); + await queue.resolveProvingJob(job.id, result); + this.log.info( + `Processed proving job id=${job.id} type=${ProvingRequestType[job.request.type]} duration=${time}ms`, + ); + } catch (err) { + this.log.error( + `Error processing proving job id=${job.id} type=${ProvingRequestType[job.request.type]}: ${err}`, + ); + await queue.rejectProvingJob(job.id, err as Error); + } + }, this.intervalMs); + + this.runningPromise.start(); + } + + async stop(): Promise { + if (!this.runningPromise) { + throw new Error('Agent is not running'); + } + + await this.runningPromise.stop(); + this.runningPromise = undefined; + } + + private work(request: ProvingRequest): Promise> { + const { type, inputs } = request; + switch (type) { + case ProvingRequestType.PUBLIC_VM: { + return Promise.resolve([{}, makeEmptyProof()] as const); + } + + case ProvingRequestType.PUBLIC_KERNEL_NON_TAIL: { + return this.prover.getPublicKernelProof({ + type: request.kernelType, + inputs, + }); + } + + case ProvingRequestType.PUBLIC_KERNEL_TAIL: { + return this.prover.getPublicTailProof({ + type: request.kernelType, + inputs, + }); + } + + case ProvingRequestType.BASE_ROLLUP: { + return this.prover.getBaseRollupProof(inputs); + } + + case ProvingRequestType.MERGE_ROLLUP: { + return this.prover.getMergeRollupProof(inputs); + } + + case ProvingRequestType.ROOT_ROLLUP: { + return this.prover.getRootRollupProof(inputs); + } + + case ProvingRequestType.BASE_PARITY: { + return this.prover.getBaseParityProof(inputs); + } + + case ProvingRequestType.ROOT_PARITY: { + return this.prover.getRootParityProof(inputs); + } + + default: { + return Promise.reject(new Error(`Invalid proof request type: ${type}`)); + } + } + } } diff --git a/yarn-project/prover-client/src/prover-pool/prover-pool.ts b/yarn-project/prover-client/src/prover-pool/prover-pool.ts index defeed61ce7..407b0a19fe6 100644 --- a/yarn-project/prover-client/src/prover-pool/prover-pool.ts +++ b/yarn-project/prover-client/src/prover-pool/prover-pool.ts @@ -1,21 +1,19 @@ -import { MemoryProvingQueue } from './memory-proving-queue.js'; -import { type ProvingAgent } from './prover-agent.js'; -import { type ProvingQueue } from './proving-queue.js'; +import { type ProvingJobSource } from '@aztec/circuit-types'; +import { type SimulationProvider } from '@aztec/simulator'; + +import { TestCircuitProver } from '../prover/test_circuit_prover.js'; +import { ProverAgent } from './prover-agent.js'; /** * Utility class that spawns N prover agents all connected to the same queue */ export class ProverPool { - private agents: ProvingAgent[] = []; + private agents: ProverAgent[] = []; private running = false; - constructor( - private size: number, - private agentFactory: (i: number) => ProvingAgent | Promise, - public readonly queue: ProvingQueue = new MemoryProvingQueue(), - ) {} + constructor(private size: number, private agentFactory: (i: number) => ProverAgent | Promise) {} - async start(): Promise { + async start(source: ProvingJobSource): Promise { if (this.running) { throw new Error('Prover pool is already running'); } @@ -29,7 +27,7 @@ export class ProverPool { } for (const agent of this.agents) { - agent.start(this.queue); + 
agent.start(source); } } @@ -44,4 +42,11 @@ export class ProverPool { this.running = false; } + + static testPool(simulationProvider: SimulationProvider, size = 1, agentPollIntervalMS = 10): ProverPool { + return new ProverPool( + size, + i => new ProverAgent(new TestCircuitProver(simulationProvider), agentPollIntervalMS, `${i}`), + ); + } } diff --git a/yarn-project/prover-client/src/prover-pool/proving-queue.ts b/yarn-project/prover-client/src/prover-pool/proving-queue.ts deleted file mode 100644 index 3ab8b015345..00000000000 --- a/yarn-project/prover-client/src/prover-pool/proving-queue.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { ProvingRequest, ProvingRequestResult, ProvingRequestType } from './proving-request.js'; - -export type GetJobOptions = { - timeoutSec?: number; -}; - -export type ProvingJob = { - id: string; - request: T; -}; - -export interface ProvingRequestProducer { - prove(request: T): Promise>; - cancelAll(): void; -} - -export interface ProvingQueueConsumer { - getProvingJob(options?: GetJobOptions): Promise | null>; - resolveProvingJob(jobId: string, result: ProvingRequestResult): Promise; - rejectProvingJob(jobId: string, reason: Error): Promise; -} - -export interface ProvingQueue extends ProvingQueueConsumer, ProvingRequestProducer {} diff --git a/yarn-project/prover-client/src/tx-prover/tx-prover.ts b/yarn-project/prover-client/src/tx-prover/tx-prover.ts index 14f36024a39..90daea5a7fa 100644 --- a/yarn-project/prover-client/src/tx-prover/tx-prover.ts +++ b/yarn-project/prover-client/src/tx-prover/tx-prover.ts @@ -7,7 +7,8 @@ import { type WorldStateSynchronizer } from '@aztec/world-state'; import { type ProverConfig } from '../config.js'; import { type VerificationKeys, getVerificationKeys } from '../mocks/verification_keys.js'; import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; -import { CircuitProverAgent } from '../prover-pool/circuit-prover-agent.js'; +import { MemoryProvingQueue } from '../prover-pool/memory-proving-queue.js'; +import { ProverAgent } from '../prover-pool/prover-agent.js'; import { ProverPool } from '../prover-pool/prover-pool.js'; import { TestCircuitProver } from '../prover/test_circuit_prover.js'; @@ -17,6 +18,7 @@ import { TestCircuitProver } from '../prover/test_circuit_prover.js'; export class TxProver implements ProverClient { private orchestrator: ProvingOrchestrator; private proverPool: ProverPool; + private queue = new MemoryProvingQueue(); constructor( private worldStateSynchronizer: WorldStateSynchronizer, @@ -27,17 +29,17 @@ export class TxProver implements ProverClient { ) { this.proverPool = new ProverPool( agentCount, - i => new CircuitProverAgent(new TestCircuitProver(simulationProvider), agentPollIntervalMS, `${i}`), + i => new ProverAgent(new TestCircuitProver(simulationProvider), agentPollIntervalMS, `${i}`), ); - this.orchestrator = new ProvingOrchestrator(worldStateSynchronizer.getLatest(), this.proverPool.queue); + this.orchestrator = new ProvingOrchestrator(worldStateSynchronizer.getLatest(), this.queue); } /** * Starts the prover instance */ public async start() { - await this.proverPool.start(); + await this.proverPool.start(this.queue); } /** diff --git a/yarn-project/pxe/tsconfig.json b/yarn-project/pxe/tsconfig.json index e32a4d6aa27..a9b056c037d 100644 --- a/yarn-project/pxe/tsconfig.json +++ b/yarn-project/pxe/tsconfig.json @@ -6,6 +6,9 @@ "tsBuildInfoFile": ".tsbuildinfo" }, "references": [ + { + "path": "../builder" + }, { "path": "../circuit-types" }, @@ -24,9 +27,6 @@ { "path": 
"../kv-store" }, - { - "path": "../builder" - }, { "path": "../noir-protocol-circuits-types" }, From 4b563cd79f16f513a05c1595a4f2673cdaa7600a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Thu, 25 Apr 2024 11:36:45 +0200 Subject: [PATCH 020/201] feat: AES oracle padding (#6013) --- .../aztec-nr/aztec/src/oracle/encryption.nr | 6 ++-- .../contracts/test_contract/src/main.nr | 10 +++++-- .../end-to-end/src/e2e_encryption.test.ts | 30 ++++++++++++++----- 3 files changed, 33 insertions(+), 13 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr b/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr index cb655c756ce..c8084432437 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/encryption.nr @@ -1,7 +1,9 @@ #[oracle(aes128Encrypt)] -pub fn aes128_encrypt_oracle(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8; N] {} +pub fn aes128_encrypt_oracle(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8; M] {} -unconstrained pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8; N] { +// AES 128 CBC with PKCS7 is padding to multiples of 16 bytes so M has to be a multiple of 16! +// (e.g. from 65 bytes long input you get 80 bytes long output and M has to be set to `80`) +unconstrained pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8; M] { aes128_encrypt_oracle(input, iv, key) } diff --git a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr index 107df25dc72..1c14b54c96a 100644 --- a/noir-projects/noir-contracts/contracts/test_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test_contract/src/main.nr @@ -310,9 +310,13 @@ contract Test { } #[aztec(private)] - fn encrypt(input: [u8; 64], iv: [u8; 16], key: [u8; 16]) { - let result = aes128_encrypt(input, iv, key); - context.emit_unencrypted_log(result); + fn encrypt(input: [u8; 64], iv: [u8; 16], key: [u8; 16]) -> [u8; 64] { + aes128_encrypt(input, iv, key) + } + + #[aztec(private)] + fn encrypt_with_padding(input: [u8; 65], iv: [u8; 16], key: [u8; 16]) -> [u8; 80] { + aes128_encrypt(input, iv, key) } #[aztec(public)] diff --git a/yarn-project/end-to-end/src/e2e_encryption.test.ts b/yarn-project/end-to-end/src/e2e_encryption.test.ts index 861a7c573c3..9206c566aea 100644 --- a/yarn-project/end-to-end/src/e2e_encryption.test.ts +++ b/yarn-project/end-to-end/src/e2e_encryption.test.ts @@ -21,21 +21,35 @@ describe('e2e_encryption', () => { afterAll(() => teardown()); - it('encrypts', async () => { + it('encrypts 🔒📄🔑💻', async () => { const input = randomBytes(64); const iv = randomBytes(16); const key = randomBytes(16); const expectedCiphertext = aes128.encryptBufferCBC(input, iv, key); - const logs = await contract.methods + const ciphertextAsBigInts = await contract.methods .encrypt(Array.from(input), Array.from(iv), Array.from(key)) - .send() - .getUnencryptedLogs(); - // Each byte of encrypted data is in its own field and it's all serialized into a long buffer so we simply extract - // each 32nd byte from the buffer to get the encrypted data - const recoveredCiphertext = logs.logs[0].log.data.filter((_, i) => (i + 1) % 32 === 0); + .simulate(); + const ciphertext = Buffer.from(ciphertextAsBigInts.map((x: bigint) => Number(x))); - expect(recoveredCiphertext).toEqual(expectedCiphertext); + expect(ciphertext).toEqual(expectedCiphertext); + }); + + it('encrypts with padding 🔒📄🔑💻 ➕ 📦', async () => { + const 
input = randomBytes(65); + const iv = randomBytes(16); + const key = randomBytes(16); + + const expectedCiphertext = aes128.encryptBufferCBC(input, iv, key); + // AES 128 CBC with PKCS7 is padding to multiples of 16 bytes so from 65 bytes long input we get 80 bytes long output + expect(expectedCiphertext.length).toBe(80); + + const ciphertextAsBigInts = await contract.methods + .encrypt_with_padding(Array.from(input), Array.from(iv), Array.from(key)) + .simulate(); + const ciphertext = Buffer.from(ciphertextAsBigInts.map((x: bigint) => Number(x))); + + expect(ciphertext).toEqual(expectedCiphertext); }); }); From 61e61aba60ec02d12141ef396c4e827c800d57bf Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Thu, 25 Apr 2024 06:08:08 -0600 Subject: [PATCH 021/201] chore: rename capture to end_setup (#6008) rename capture_min_revertible_side_effect_counter to end_setup --- noir-projects/aztec-nr/authwit/src/account.nr | 2 +- noir-projects/aztec-nr/aztec/src/context/private_context.nr | 2 +- .../contracts/app_subscription_contract/src/main.nr | 2 +- .../private-kernel-lib/src/private_kernel_tail_to_public.nr | 6 +++--- .../crates/types/src/tests/fixture_builder.nr | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/noir-projects/aztec-nr/authwit/src/account.nr b/noir-projects/aztec-nr/authwit/src/account.nr index 2e33c4a0823..eb926315a65 100644 --- a/noir-projects/aztec-nr/authwit/src/account.nr +++ b/noir-projects/aztec-nr/authwit/src/account.nr @@ -62,7 +62,7 @@ impl AccountActions { let fee_hash = fee_payload.hash(); assert(valid_fn(private_context, fee_hash)); fee_payload.execute_calls(private_context); - private_context.capture_min_revertible_side_effect_counter(); + private_context.end_setup(); let app_hash = app_payload.hash(); assert(valid_fn(private_context, app_hash)); diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index c161ae99399..43006332b78 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -192,7 +192,7 @@ impl PrivateContext { priv_circuit_pub_inputs } - pub fn capture_min_revertible_side_effect_counter(&mut self) { + pub fn end_setup(&mut self) { self.min_revertible_side_effect_counter = self.side_effect_counter; } diff --git a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr index eb316f30186..bab4f24cb51 100644 --- a/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/app_subscription_contract/src/main.nr @@ -49,7 +49,7 @@ contract AppSubscription { GasToken::at(storage.gas_token_address.read_private()).pay_fee(42).enqueue(&mut context); - context.capture_min_revertible_side_effect_counter(); + context.end_setup(); AppSubscription::at(context.this_address()).assert_not_expired(note.expiry_block_number).enqueue(&mut context); diff --git a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr index bdef6ba568d..c19f24bdee4 100644 --- a/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr +++ 
b/noir-projects/noir-protocol-circuits/crates/private-kernel-lib/src/private_kernel_tail_to_public.nr @@ -465,7 +465,7 @@ mod tests { let mut builder = PrivateKernelTailToPublicInputsBuilder::new(); // expect 3 non-revertible nullifiers: the tx nullifier + 2 new ones builder.previous_kernel.append_new_nullifiers(2); - builder.previous_kernel.capture_min_revertible_side_effect_counter(); + builder.previous_kernel.end_setup(); // expect 2 revertible nullifiers builder.previous_kernel.append_new_nullifiers(2); @@ -494,7 +494,7 @@ // expect 2 non-revertible commitments builder.previous_kernel.append_new_note_hashes(2); - builder.previous_kernel.capture_min_revertible_side_effect_counter(); + builder.previous_kernel.end_setup(); // expect 2 revertible commitments builder.previous_kernel.append_new_note_hashes(2); @@ -525,7 +525,7 @@ // add one hash in non-revertible part builder.previous_kernel.append_new_note_hashes(1); - builder.previous_kernel.capture_min_revertible_side_effect_counter(); + builder.previous_kernel.end_setup(); // nullify it in revertible part builder.previous_kernel.append_new_nullifiers(1); diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr index eb50167d527..a70de06ebfa 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixture_builder.nr @@ -377,7 +377,7 @@ impl FixtureBuilder { self.public_call_stack.push(call_stack_item); } - pub fn capture_min_revertible_side_effect_counter(&mut self) { + pub fn end_setup(&mut self) { self.min_revertible_side_effect_counter = self.counter; } From 703e0c1e2c2a5703410ff5fd4c1a135131254a53 Mon Sep 17 00:00:00 2001 From: Alex Gherghisan Date: Thu, 25 Apr 2024 13:13:51 +0100 Subject: [PATCH 022/201] fix: bigint corruption in lmdb (#6002) An attempt at fixing the bigint decoding corruption that we've seen sporadically in CI.
The fix saves all of the deferred note ids linked to a contract into an array before iterating over them. --- .../{flakey_e2e_2_pxes.test.ts => e2e_2_pxes.test.ts} | 6 +++--- yarn-project/end-to-end/src/fixtures/utils.ts | 9 +++++++++ yarn-project/pxe/src/database/kv_pxe_database.ts | 2 +- 3 files changed, 13 insertions(+), 4 deletions(-) rename yarn-project/end-to-end/src/{flakey_e2e_2_pxes.test.ts => e2e_2_pxes.test.ts} (99%) diff --git a/yarn-project/end-to-end/src/flakey_e2e_2_pxes.test.ts b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts similarity index 99% rename from yarn-project/end-to-end/src/flakey_e2e_2_pxes.test.ts rename to yarn-project/end-to-end/src/e2e_2_pxes.test.ts index ea0b055b11a..95dcb88a2f4 100644 --- a/yarn-project/end-to-end/src/flakey_e2e_2_pxes.test.ts +++ b/yarn-project/end-to-end/src/e2e_2_pxes.test.ts @@ -29,6 +29,7 @@ describe('e2e_2_pxes', () => { let walletB: Wallet; let logger: DebugLogger; let teardownA: () => Promise; + let teardownB: () => Promise; beforeEach(async () => { ({ @@ -42,14 +43,13 @@ ({ pxe: pxeB, wallets: [walletB], + teardown: teardownB, } = await setupPXEService(1, aztecNode!, {}, undefined, true)); }, 100_000); afterEach(async () => { + await teardownB(); await teardownA(); - if ((pxeB as any).stop) { - await (pxeB as any).stop(); - } }); const awaitUserSynchronized = async (wallet: Wallet, owner: AztecAddress) => { diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts index 802f3925c9d..bcfcf5a2aae 100644 --- a/yarn-project/end-to-end/src/fixtures/utils.ts +++ b/yarn-project/end-to-end/src/fixtures/utils.ts @@ -196,16 +196,25 @@ export async function setupPXEService( * Logger instance named as the current test. */ logger: DebugLogger; + /** + * Teardown function + */ + teardown: () => Promise; }> { const pxeServiceConfig = { ...getPXEServiceConfig(), ...opts }; const pxe = await createPXEService(aztecNode, pxeServiceConfig, useLogSuffix); const wallets = await createAccounts(pxe, numberOfAccounts); + const teardown = async () => { + await pxe.stop(); + }; + return { pxe, wallets, logger, + teardown, }; } diff --git a/yarn-project/pxe/src/database/kv_pxe_database.ts b/yarn-project/pxe/src/database/kv_pxe_database.ts index aadab576f8d..c07a29219de 100644 --- a/yarn-project/pxe/src/database/kv_pxe_database.ts +++ b/yarn-project/pxe/src/database/kv_pxe_database.ts @@ -189,7 +189,7 @@ export class KVPxeDatabase implements PxeDatabase { removeDeferredNotesByContract(contractAddress: AztecAddress): Promise { return this.#db.transaction(() => { const deferredNotes: DeferredNoteDao[] = []; - const indices = this.#deferredNotesByContract.getValues(contractAddress.toString()); + const indices = Array.from(this.#deferredNotesByContract.getValues(contractAddress.toString())); for (const index of indices) { const deferredNoteBuffer = this.#deferredNotes.at(index); From 4cee8e0644780e527395da452a831055ec41a4c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nicol=C3=A1s=20Venturo?= Date: Thu, 25 Apr 2024 10:40:47 -0300 Subject: [PATCH 023/201] test: extend SharedMutable tests (#6005) Working on https://github.com/AztecProtocol/aztec-packages/pull/5963 I realized we were missing test cases for `get_scheduled_value_in_public`, so I added some.
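For reference, the invariant the new cases pin down is that `get_scheduled_value_in_public` always returns the `post` value paired with the stored block of change, no matter whether that block lies in the future, is the current block, or is already in the past; only `get_current_value_in_public` switches between `pre` and `post`. A minimal sketch of the shape shared by the three new tests, using the `setup`, `pre`, `post`, `slot` and `state_var` names from the diff below (`block_of_change` is a stand-in for the three concrete values exercised: block_number + 1, block_number and block_number - 1):

    // The mocked storage layout is [pre, post, block_of_change].
    OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_of_change]);
    // The scheduled value ignores where the current block sits relative to the change.
    assert_eq(state_var.get_scheduled_value_in_public(), (post, block_of_change as u32));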
--- .../shared_mutable/shared_mutable.nr | 48 +++++++++++++++---- 1 file changed, 39 insertions(+), 9 deletions(-) diff --git a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr index 5d327b1f25b..71cf0a85956 100644 --- a/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr +++ b/noir-projects/aztec-nr/aztec/src/state_vars/shared_mutable/shared_mutable.nr @@ -129,14 +129,16 @@ mod test { global TEST_DELAY = 20; + global pre = 13; + global post = 42; + #[test] fn test_get_current_value_in_public_before_change() { let (state_var, block_number) = setup(false); let slot = state_var.get_derived_storage_slot(); - let (pre, post) = (13, 17); - // Change in the future + // Change in the future, current value is pre OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number + 1]); assert_eq(state_var.get_current_value_in_public(), pre); } @@ -146,9 +148,8 @@ mod test { let (state_var, block_number) = setup(false); let slot = state_var.get_derived_storage_slot(); - let (pre, post) = (13, 17); - // Change in the current block + // Change in the current block, current value is post OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number]); assert_eq(state_var.get_current_value_in_public(), post); } @@ -158,19 +159,50 @@ mod test { let (state_var, block_number) = setup(false); let slot = state_var.get_derived_storage_slot(); - let (pre, post) = (13, 17); - // Change in the past + // Change in the past, current value is post OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number - 1]); assert_eq(state_var.get_current_value_in_public(), post); } + #[test] + fn test_get_scheduled_value_in_public_before_change() { + let (state_var, block_number) = setup(false); + + let slot = state_var.get_derived_storage_slot(); + + // Change in the future, scheduled is post (always is) + OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number + 1]); + assert_eq(state_var.get_scheduled_value_in_public(), (post, (block_number + 1) as u32)); + } + + #[test] + fn test_get_scheduled_value_in_public_at_change() { + let (state_var, block_number) = setup(false); + + let slot = state_var.get_derived_storage_slot(); + + // Change in the current block, scheduled is post (always is) + OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number]); + assert_eq(state_var.get_scheduled_value_in_public(), (post, block_number as u32)); + } + + #[test] + fn test_get_scheduled_value_in_public_after_change() { + let (state_var, block_number) = setup(false); + + let slot = state_var.get_derived_storage_slot(); + + // Change in the past, scheduled is post (always is) + OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number - 1]); + assert_eq(state_var.get_scheduled_value_in_public(), (post, (block_number - 1) as u32)); + } + #[test] fn test_schedule_value_change_before_change() { let (state_var, block_number) = setup(false); let slot = state_var.get_derived_storage_slot(); - let (pre, post) = (13, 17); // Change in the future OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number + 1]); @@ -189,7 +221,6 @@ mod test { let (state_var, block_number) = setup(false); let slot = state_var.get_derived_storage_slot(); - let (pre, post) = (13, 17); // Change in the current block 
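// (the storageRead mock below returns [pre, post, block_of_change], so passing block_number as the third value models a change landing exactly in the current block)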
OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number]); @@ -208,7 +239,6 @@ mod test { let (state_var, block_number) = setup(false); let slot = state_var.get_derived_storage_slot(); - let (pre, post) = (13, 17); // Change in the past OracleMock::mock("storageRead").with_params((slot, 3)).returns([pre, post, block_number - 1]); From f2364d40f850414029ed967eb05c48b5be2ffff6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Bene=C5=A1?= Date: Thu, 25 Apr 2024 16:47:21 +0200 Subject: [PATCH 024/201] refactor: improved naming in `TxExecutionRequest` (#6014) --- .../contract/contract_function_interaction.ts | 4 ++-- .../default_multi_call_entrypoint.ts | 4 ++-- .../circuit-types/src/packed_values.ts | 15 +++--------- .../circuit-types/src/tx_execution_request.ts | 23 ++++++++++--------- .../entrypoints/src/account_entrypoint.ts | 4 ++-- .../entrypoints/src/dapp_entrypoint.ts | 4 ++-- .../src/pxe_service/test/pxe_test_suite.ts | 4 ++-- .../src/client/private_execution.test.ts | 4 ++-- .../simulator/src/client/simulator.ts | 4 ++-- 9 files changed, 29 insertions(+), 37 deletions(-) diff --git a/yarn-project/aztec.js/src/contract/contract_function_interaction.ts b/yarn-project/aztec.js/src/contract/contract_function_interaction.ts index 6d34219e5e6..6d194bfa9f1 100644 --- a/yarn-project/aztec.js/src/contract/contract_function_interaction.ts +++ b/yarn-project/aztec.js/src/contract/contract_function_interaction.ts @@ -91,11 +91,11 @@ export class ContractFunctionInteraction extends BaseContractInteraction { const gasSettings = options.gasSettings ?? GasSettings.simulation(); const txRequest = TxExecutionRequest.from({ - argsHash: packedArgs.hash, + firstCallArgsHash: packedArgs.hash, origin: this.contractAddress, functionData: FunctionData.fromAbi(this.functionDao), txContext: new TxContext(nodeInfo.chainId, nodeInfo.protocolVersion, gasSettings), - packedArguments: [packedArgs], + argsOfCalls: [packedArgs], authWitnesses: [], }); const simulatedTx = await this.pxe.simulateTx(txRequest, true, options.from ?? this.wallet.getAddress()); diff --git a/yarn-project/aztec.js/src/entrypoint/default_multi_call_entrypoint.ts b/yarn-project/aztec.js/src/entrypoint/default_multi_call_entrypoint.ts index 389f2d43237..167dfac2501 100644 --- a/yarn-project/aztec.js/src/entrypoint/default_multi_call_entrypoint.ts +++ b/yarn-project/aztec.js/src/entrypoint/default_multi_call_entrypoint.ts @@ -22,11 +22,11 @@ export class DefaultMultiCallEntrypoint implements EntrypointInterface { const gasSettings = executions.fee?.gasSettings ?? 
GasSettings.default(); const txRequest = TxExecutionRequest.from({ - argsHash: entrypointPackedArgs.hash, + firstCallArgsHash: entrypointPackedArgs.hash, origin: this.address, functionData: FunctionData.fromAbi(abi), txContext: new TxContext(this.chainId, this.version, gasSettings), - packedArguments: [...payload.packedArguments, ...packedArguments, entrypointPackedArgs], + argsOfCalls: [...payload.packedArguments, ...packedArguments, entrypointPackedArgs], authWitnesses, }); diff --git a/yarn-project/circuit-types/src/packed_values.ts b/yarn-project/circuit-types/src/packed_values.ts index c6dbe93b12b..7b3a4ce54ec 100644 --- a/yarn-project/circuit-types/src/packed_values.ts +++ b/yarn-project/circuit-types/src/packed_values.ts @@ -1,31 +1,22 @@ import { Fr, Vector } from '@aztec/circuits.js'; import { computeVarArgsHash } from '@aztec/circuits.js/hash'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; -import { type FieldsOf } from '@aztec/foundation/types'; /** * Packs a set of values into a hash. */ export class PackedValues { - constructor( + private constructor( /** * Raw values. */ - public values: Fr[], + public readonly values: Fr[], /** * The hash of the raw values */ - public hash: Fr, + public readonly hash: Fr, ) {} - static getFields(fields: FieldsOf) { - return [fields.values, fields.hash] as const; - } - - static from(fields: FieldsOf): PackedValues { - return new PackedValues(...PackedValues.getFields(fields)); - } - static fromValues(values: Fr[]) { return new PackedValues(values, computeVarArgsHash(values)); } diff --git a/yarn-project/circuit-types/src/tx_execution_request.ts b/yarn-project/circuit-types/src/tx_execution_request.ts index eba0eda27d8..9d7101877dc 100644 --- a/yarn-project/circuit-types/src/tx_execution_request.ts +++ b/yarn-project/circuit-types/src/tx_execution_request.ts @@ -20,19 +20,20 @@ export class TxExecutionRequest { */ public functionData: FunctionData, /** - * The hash of the entry point arguments. + * The hash of arguments of first call to be executed (usually account entrypoint). + * @dev This hash is a pointer to `argsOfCalls` unordered array. */ - public argsHash: Fr, + public firstCallArgsHash: Fr, /** * Transaction context. */ public txContext: TxContext, /** - * These packed arguments will be used during transaction simulation. - * For example, a call to an account contract might contain as many packed arguments - * as relayed function calls, and one for the entrypoint. + * An unordered array of packed arguments for each call in the transaction. + * @dev These arguments are accessed in Noir via oracle and constrained against the args hash. The length of + * the array is equal to the number of function calls in the transaction (1 args per 1 call). */ - public packedArguments: PackedValues[], + public argsOfCalls: PackedValues[], /** * Transient authorization witnesses for authorizing the execution of one or more actions during this tx. * These witnesses are not expected to be stored in the local witnesses database of the PXE. 
@@ -41,16 +42,16 @@ export class TxExecutionRequest { ) {} toTxRequest(): TxRequest { - return new TxRequest(this.origin, this.functionData, this.argsHash, this.txContext); + return new TxRequest(this.origin, this.functionData, this.firstCallArgsHash, this.txContext); } static getFields(fields: FieldsOf) { return [ fields.origin, fields.functionData, - fields.argsHash, + fields.firstCallArgsHash, fields.txContext, - fields.packedArguments, + fields.argsOfCalls, fields.authWitnesses, ] as const; } @@ -67,9 +68,9 @@ export class TxExecutionRequest { return serializeToBuffer( this.origin, this.functionData, - this.argsHash, + this.firstCallArgsHash, this.txContext, - new Vector(this.packedArguments), + new Vector(this.argsOfCalls), new Vector(this.authWitnesses), ); } diff --git a/yarn-project/entrypoints/src/account_entrypoint.ts b/yarn-project/entrypoints/src/account_entrypoint.ts index 2b9a867578a..b750f3232f0 100644 --- a/yarn-project/entrypoints/src/account_entrypoint.ts +++ b/yarn-project/entrypoints/src/account_entrypoint.ts @@ -31,11 +31,11 @@ export class DefaultAccountEntrypoint implements EntrypointInterface { const feeAuthWitness = await this.auth.createAuthWit(feePayload.hash()); const txRequest = TxExecutionRequest.from({ - argsHash: entrypointPackedArgs.hash, + firstCallArgsHash: entrypointPackedArgs.hash, origin: this.address, functionData: FunctionData.fromAbi(abi), txContext: new TxContext(this.chainId, this.version, gasSettings), - packedArguments: [...appPayload.packedArguments, ...feePayload.packedArguments, entrypointPackedArgs], + argsOfCalls: [...appPayload.packedArguments, ...feePayload.packedArguments, entrypointPackedArgs], authWitnesses: [appAuthWitness, feeAuthWitness], }); diff --git a/yarn-project/entrypoints/src/dapp_entrypoint.ts b/yarn-project/entrypoints/src/dapp_entrypoint.ts index 22e16aeff71..b80307dfc3a 100644 --- a/yarn-project/entrypoints/src/dapp_entrypoint.ts +++ b/yarn-project/entrypoints/src/dapp_entrypoint.ts @@ -44,11 +44,11 @@ export class DefaultDappEntrypoint implements EntrypointInterface { const authWitness = await this.userAuthWitnessProvider.createAuthWit(outerHash); const txRequest = TxExecutionRequest.from({ - argsHash: entrypointPackedArgs.hash, + firstCallArgsHash: entrypointPackedArgs.hash, origin: this.dappEntrypointAddress, functionData, txContext: new TxContext(this.chainId, this.version, gasSettings), - packedArguments: [...payload.packedArguments, entrypointPackedArgs], + argsOfCalls: [...payload.packedArguments, entrypointPackedArgs], authWitnesses: [authWitness], }); diff --git a/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts b/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts index 9b17eefc01c..75d2f11b142 100644 --- a/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts +++ b/yarn-project/pxe/src/pxe_service/test/pxe_test_suite.ts @@ -125,10 +125,10 @@ export const pxeTestSuite = (testName: string, pxeSetup: () => Promise) => functionData.isPrivate = false; const txExecutionRequest = TxExecutionRequest.from({ origin: AztecAddress.random(), - argsHash: new Fr(0), + firstCallArgsHash: new Fr(0), functionData, txContext: TxContext.empty(), - packedArguments: [], + argsOfCalls: [], authWitnesses: [], }); diff --git a/yarn-project/simulator/src/client/private_execution.test.ts b/yarn-project/simulator/src/client/private_execution.test.ts index b34939ce8ee..ba7163432b2 100644 --- a/yarn-project/simulator/src/client/private_execution.test.ts +++ b/yarn-project/simulator/src/client/private_execution.test.ts @@ 
-113,10 +113,10 @@ describe('Private Execution test suite', () => { const functionData = FunctionData.fromAbi(artifact); const txRequest = TxExecutionRequest.from({ origin: contractAddress, - argsHash: packedArguments.hash, + firstCallArgsHash: packedArguments.hash, functionData, txContext: TxContext.from({ ...txContextFields, ...txContext }), - packedArguments: [packedArguments], + argsOfCalls: [packedArguments], authWitnesses: [], }); diff --git a/yarn-project/simulator/src/client/simulator.ts b/yarn-project/simulator/src/client/simulator.ts index cc9d58a0b4e..a0499982a29 100644 --- a/yarn-project/simulator/src/client/simulator.ts +++ b/yarn-project/simulator/src/client/simulator.ts @@ -96,12 +96,12 @@ export class AcirSimulator { ); const context = new ClientExecutionContext( contractAddress, - request.argsHash, + request.firstCallArgsHash, request.txContext, callContext, header, request.authWitnesses, - PackedValuesCache.create(request.packedArguments), + PackedValuesCache.create(request.argsOfCalls), new ExecutionNoteCache(), this.db, curve, From 087a624689ca34de4ac6dca759cf5e644a163b37 Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 25 Apr 2024 11:52:34 -0300 Subject: [PATCH 025/201] chore(ci): Reenable deploy tests (#6011) Removing from the flakey set. --- .../contract_class_registration.test.ts | 0 .../deploy_method.test.ts | 0 .../deploy_test.ts | 0 .../legacy.test.ts | 0 .../private_initialization.test.ts | 0 .../regressions.test.ts | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename yarn-project/end-to-end/src/{flakey_e2e_deploy_contract => e2e_deploy_contract}/contract_class_registration.test.ts (100%) rename yarn-project/end-to-end/src/{flakey_e2e_deploy_contract => e2e_deploy_contract}/deploy_method.test.ts (100%) rename yarn-project/end-to-end/src/{flakey_e2e_deploy_contract => e2e_deploy_contract}/deploy_test.ts (100%) rename yarn-project/end-to-end/src/{flakey_e2e_deploy_contract => e2e_deploy_contract}/legacy.test.ts (100%) rename yarn-project/end-to-end/src/{flakey_e2e_deploy_contract => e2e_deploy_contract}/private_initialization.test.ts (100%) rename yarn-project/end-to-end/src/{flakey_e2e_deploy_contract => e2e_deploy_contract}/regressions.test.ts (100%) diff --git a/yarn-project/end-to-end/src/flakey_e2e_deploy_contract/contract_class_registration.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts similarity index 100% rename from yarn-project/end-to-end/src/flakey_e2e_deploy_contract/contract_class_registration.test.ts rename to yarn-project/end-to-end/src/e2e_deploy_contract/contract_class_registration.test.ts diff --git a/yarn-project/end-to-end/src/flakey_e2e_deploy_contract/deploy_method.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts similarity index 100% rename from yarn-project/end-to-end/src/flakey_e2e_deploy_contract/deploy_method.test.ts rename to yarn-project/end-to-end/src/e2e_deploy_contract/deploy_method.test.ts diff --git a/yarn-project/end-to-end/src/flakey_e2e_deploy_contract/deploy_test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/deploy_test.ts similarity index 100% rename from yarn-project/end-to-end/src/flakey_e2e_deploy_contract/deploy_test.ts rename to yarn-project/end-to-end/src/e2e_deploy_contract/deploy_test.ts diff --git a/yarn-project/end-to-end/src/flakey_e2e_deploy_contract/legacy.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/legacy.test.ts similarity index 100% rename from 
yarn-project/end-to-end/src/flakey_e2e_deploy_contract/legacy.test.ts rename to yarn-project/end-to-end/src/e2e_deploy_contract/legacy.test.ts diff --git a/yarn-project/end-to-end/src/flakey_e2e_deploy_contract/private_initialization.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/private_initialization.test.ts similarity index 100% rename from yarn-project/end-to-end/src/flakey_e2e_deploy_contract/private_initialization.test.ts rename to yarn-project/end-to-end/src/e2e_deploy_contract/private_initialization.test.ts diff --git a/yarn-project/end-to-end/src/flakey_e2e_deploy_contract/regressions.test.ts b/yarn-project/end-to-end/src/e2e_deploy_contract/regressions.test.ts similarity index 100% rename from yarn-project/end-to-end/src/flakey_e2e_deploy_contract/regressions.test.ts rename to yarn-project/end-to-end/src/e2e_deploy_contract/regressions.test.ts From ffd5f460fce8b1f12265730f97c8cfcd3a4774ca Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 25 Apr 2024 16:25:44 +0100 Subject: [PATCH 026/201] chore!: remove `Opcode::Brillig` from ACIR (#5995) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR removes `Opcode::Brillig` as it has been superseded by `Opcode::BrilligCall` --------- Co-authored-by: ludamad Co-authored-by: ledwards2225 <98505400+ledwards2225@users.noreply.github.com> Co-authored-by: spypsy Co-authored-by: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Co-authored-by: Maxim Vezenov Co-authored-by: Alex Gherghisan Co-authored-by: Lasse Herskind <16536249+LHerskind@users.noreply.github.com> Co-authored-by: Jan Beneš --- .../dsl/acir_format/serde/acir.hpp | 385 ++++++------ .../acvm-repo/acir/benches/serialization.rs | 1 + .../noir-repo/acvm-repo/acir/codegen/acir.cpp | 364 ++++++----- .../acvm-repo/acir/src/circuit/brillig.rs | 10 - .../acvm-repo/acir/src/circuit/opcodes.rs | 9 +- .../acir/tests/test_program_serialization.rs | 138 ++++--- .../acvm/src/compiler/optimizers/mod.rs | 2 +- .../acvm/src/compiler/transformers/mod.rs | 14 - .../acvm-repo/acvm/src/pwg/brillig.rs | 22 +- noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs | 46 --- noir/noir-repo/acvm-repo/acvm/tests/solver.rs | 285 ++++++------- .../test/shared/complex_foreign_call.ts | 14 +- .../acvm_js/test/shared/foreign_call.ts | 8 +- .../acvm_js/test/shared/memory_op.ts | 8 +- .../acvm_js/test/shared/nested_acir_call.ts | 14 +- 15 files changed, 504 insertions(+), 816 deletions(-) diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp index 5fd06543467..09005b29941 100644 --- a/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_format/serde/acir.hpp @@ -348,6 +348,134 @@ struct BrilligInputs { static BrilligInputs bincodeDeserialize(std::vector); }; +struct BrilligOutputs { + + struct Simple { + Program::Witness value; + + friend bool operator==(const Simple&, const Simple&); + std::vector bincodeSerialize() const; + static Simple bincodeDeserialize(std::vector); + }; + + struct Array { + std::vector value; + + friend bool operator==(const Array&, const Array&); + std::vector bincodeSerialize() const; + static Array bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const BrilligOutputs&, const BrilligOutputs&); + std::vector bincodeSerialize() const; + static BrilligOutputs bincodeDeserialize(std::vector);
+}; + +struct Directive { + + struct ToLeRadix { + Program::Expression a; + std::vector b; + uint32_t radix; + + friend bool operator==(const ToLeRadix&, const ToLeRadix&); + std::vector bincodeSerialize() const; + static ToLeRadix bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const Directive&, const Directive&); + std::vector bincodeSerialize() const; + static Directive bincodeDeserialize(std::vector); +}; + +struct MemOp { + Program::Expression operation; + Program::Expression index; + Program::Expression value; + + friend bool operator==(const MemOp&, const MemOp&); + std::vector bincodeSerialize() const; + static MemOp bincodeDeserialize(std::vector); +}; + +struct Opcode { + + struct AssertZero { + Program::Expression value; + + friend bool operator==(const AssertZero&, const AssertZero&); + std::vector bincodeSerialize() const; + static AssertZero bincodeDeserialize(std::vector); + }; + + struct BlackBoxFuncCall { + Program::BlackBoxFuncCall value; + + friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); + std::vector bincodeSerialize() const; + static BlackBoxFuncCall bincodeDeserialize(std::vector); + }; + + struct Directive { + Program::Directive value; + + friend bool operator==(const Directive&, const Directive&); + std::vector bincodeSerialize() const; + static Directive bincodeDeserialize(std::vector); + }; + + struct MemoryOp { + Program::BlockId block_id; + Program::MemOp op; + std::optional predicate; + + friend bool operator==(const MemoryOp&, const MemoryOp&); + std::vector bincodeSerialize() const; + static MemoryOp bincodeDeserialize(std::vector); + }; + + struct MemoryInit { + Program::BlockId block_id; + std::vector init; + + friend bool operator==(const MemoryInit&, const MemoryInit&); + std::vector bincodeSerialize() const; + static MemoryInit bincodeDeserialize(std::vector); + }; + + struct BrilligCall { + uint32_t id; + std::vector inputs; + std::vector outputs; + std::optional predicate; + + friend bool operator==(const BrilligCall&, const BrilligCall&); + std::vector bincodeSerialize() const; + static BrilligCall bincodeDeserialize(std::vector); + }; + + struct Call { + uint32_t id; + std::vector inputs; + std::vector outputs; + std::optional predicate; + + friend bool operator==(const Call&, const Call&); + std::vector bincodeSerialize() const; + static Call bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const Opcode&, const Opcode&); + std::vector bincodeSerialize() const; + static Opcode bincodeDeserialize(std::vector); +}; + struct BinaryFieldOp { struct Add { @@ -1008,153 +1136,6 @@ struct BrilligOpcode { static BrilligOpcode bincodeDeserialize(std::vector); }; -struct BrilligOutputs { - - struct Simple { - Program::Witness value; - - friend bool operator==(const Simple&, const Simple&); - std::vector bincodeSerialize() const; - static Simple bincodeDeserialize(std::vector); - }; - - struct Array { - std::vector value; - - friend bool operator==(const Array&, const Array&); - std::vector bincodeSerialize() const; - static Array bincodeDeserialize(std::vector); - }; - - std::variant value; - - friend bool operator==(const BrilligOutputs&, const BrilligOutputs&); - std::vector bincodeSerialize() const; - static BrilligOutputs bincodeDeserialize(std::vector); -}; - -struct Brillig { - std::vector inputs; - std::vector outputs; - std::vector bytecode; - std::optional predicate; - - friend bool operator==(const Brillig&, const Brillig&); - std::vector 
bincodeSerialize() const; - static Brillig bincodeDeserialize(std::vector); -}; - -struct Directive { - - struct ToLeRadix { - Program::Expression a; - std::vector b; - uint32_t radix; - - friend bool operator==(const ToLeRadix&, const ToLeRadix&); - std::vector bincodeSerialize() const; - static ToLeRadix bincodeDeserialize(std::vector); - }; - - std::variant value; - - friend bool operator==(const Directive&, const Directive&); - std::vector bincodeSerialize() const; - static Directive bincodeDeserialize(std::vector); -}; - -struct MemOp { - Program::Expression operation; - Program::Expression index; - Program::Expression value; - - friend bool operator==(const MemOp&, const MemOp&); - std::vector bincodeSerialize() const; - static MemOp bincodeDeserialize(std::vector); -}; - -struct Opcode { - - struct AssertZero { - Program::Expression value; - - friend bool operator==(const AssertZero&, const AssertZero&); - std::vector bincodeSerialize() const; - static AssertZero bincodeDeserialize(std::vector); - }; - - struct BlackBoxFuncCall { - Program::BlackBoxFuncCall value; - - friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); - std::vector bincodeSerialize() const; - static BlackBoxFuncCall bincodeDeserialize(std::vector); - }; - - struct Directive { - Program::Directive value; - - friend bool operator==(const Directive&, const Directive&); - std::vector bincodeSerialize() const; - static Directive bincodeDeserialize(std::vector); - }; - - struct Brillig { - Program::Brillig value; - - friend bool operator==(const Brillig&, const Brillig&); - std::vector bincodeSerialize() const; - static Brillig bincodeDeserialize(std::vector); - }; - - struct MemoryOp { - Program::BlockId block_id; - Program::MemOp op; - std::optional predicate; - - friend bool operator==(const MemoryOp&, const MemoryOp&); - std::vector bincodeSerialize() const; - static MemoryOp bincodeDeserialize(std::vector); - }; - - struct MemoryInit { - Program::BlockId block_id; - std::vector init; - - friend bool operator==(const MemoryInit&, const MemoryInit&); - std::vector bincodeSerialize() const; - static MemoryInit bincodeDeserialize(std::vector); - }; - - struct BrilligCall { - uint32_t id; - std::vector inputs; - std::vector outputs; - std::optional predicate; - - friend bool operator==(const BrilligCall&, const BrilligCall&); - std::vector bincodeSerialize() const; - static BrilligCall bincodeDeserialize(std::vector); - }; - - struct Call { - uint32_t id; - std::vector inputs; - std::vector outputs; - std::optional predicate; - - friend bool operator==(const Call&, const Call&); - std::vector bincodeSerialize() const; - static Call bincodeDeserialize(std::vector); - }; - - std::variant value; - - friend bool operator==(const Opcode&, const Opcode&); - std::vector bincodeSerialize() const; - static Opcode bincodeDeserialize(std::vector); -}; - struct ExpressionWidth { struct Unbounded { @@ -4837,70 +4818,6 @@ Program::BlockId serde::Deserializable::deserialize(Deserializ namespace Program { -inline bool operator==(const Brillig& lhs, const Brillig& rhs) -{ - if (!(lhs.inputs == rhs.inputs)) { - return false; - } - if (!(lhs.outputs == rhs.outputs)) { - return false; - } - if (!(lhs.bytecode == rhs.bytecode)) { - return false; - } - if (!(lhs.predicate == rhs.predicate)) { - return false; - } - return true; -} - -inline std::vector Brillig::bincodeSerialize() const -{ - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return 
std::move(serializer).bytes(); -} - -inline Brillig Brillig::bincodeDeserialize(std::vector input) -{ - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw_or_abort("Some input bytes were not read"); - } - return value; -} - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::Brillig& obj, Serializer& serializer) -{ - serializer.increase_container_depth(); - serde::Serializable::serialize(obj.inputs, serializer); - serde::Serializable::serialize(obj.outputs, serializer); - serde::Serializable::serialize(obj.bytecode, serializer); - serde::Serializable::serialize(obj.predicate, serializer); - serializer.decrease_container_depth(); -} - -template <> -template -Program::Brillig serde::Deserializable::deserialize(Deserializer& deserializer) -{ - deserializer.increase_container_depth(); - Program::Brillig obj; - obj.inputs = serde::Deserializable::deserialize(deserializer); - obj.outputs = serde::Deserializable::deserialize(deserializer); - obj.bytecode = serde::Deserializable::deserialize(deserializer); - obj.predicate = serde::Deserializable::deserialize(deserializer); - deserializer.decrease_container_depth(); - return obj; -} - -namespace Program { - inline bool operator==(const BrilligBytecode& lhs, const BrilligBytecode& rhs) { if (!(lhs.bytecode == rhs.bytecode)) { @@ -7348,52 +7265,6 @@ Program::Opcode::Directive serde::Deserializable::de namespace Program { -inline bool operator==(const Opcode::Brillig& lhs, const Opcode::Brillig& rhs) -{ - if (!(lhs.value == rhs.value)) { - return false; - } - return true; -} - -inline std::vector Opcode::Brillig::bincodeSerialize() const -{ - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); -} - -inline Opcode::Brillig Opcode::Brillig::bincodeDeserialize(std::vector input) -{ - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw_or_abort("Some input bytes were not read"); - } - return value; -} - -} // end of namespace Program - -template <> -template -void serde::Serializable::serialize(const Program::Opcode::Brillig& obj, - Serializer& serializer) -{ - serde::Serializable::serialize(obj.value, serializer); -} - -template <> -template -Program::Opcode::Brillig serde::Deserializable::deserialize(Deserializer& deserializer) -{ - Program::Opcode::Brillig obj; - obj.value = serde::Deserializable::deserialize(deserializer); - return obj; -} - -namespace Program { - inline bool operator==(const Opcode::MemoryOp& lhs, const Opcode::MemoryOp& rhs) { if (!(lhs.block_id == rhs.block_id)) { diff --git a/noir/noir-repo/acvm-repo/acir/benches/serialization.rs b/noir/noir-repo/acvm-repo/acir/benches/serialization.rs index 73e3916a73b..e51726e3901 100644 --- a/noir/noir-repo/acvm-repo/acir/benches/serialization.rs +++ b/noir/noir-repo/acvm-repo/acir/benches/serialization.rs @@ -40,6 +40,7 @@ fn sample_program(num_opcodes: usize) -> Program { assert_messages: Vec::new(), recursive: false, }], + unconstrained_functions: Vec::new(), } } diff --git a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp index 6c7bd347e5d..9ce25c6fd94 100644 --- a/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp +++ 
b/noir/noir-repo/acvm-repo/acir/codegen/acir.cpp @@ -324,6 +324,134 @@ namespace Program { static BrilligInputs bincodeDeserialize(std::vector); }; + struct BrilligOutputs { + + struct Simple { + Program::Witness value; + + friend bool operator==(const Simple&, const Simple&); + std::vector bincodeSerialize() const; + static Simple bincodeDeserialize(std::vector); + }; + + struct Array { + std::vector value; + + friend bool operator==(const Array&, const Array&); + std::vector bincodeSerialize() const; + static Array bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const BrilligOutputs&, const BrilligOutputs&); + std::vector bincodeSerialize() const; + static BrilligOutputs bincodeDeserialize(std::vector); + }; + + struct Directive { + + struct ToLeRadix { + Program::Expression a; + std::vector b; + uint32_t radix; + + friend bool operator==(const ToLeRadix&, const ToLeRadix&); + std::vector bincodeSerialize() const; + static ToLeRadix bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const Directive&, const Directive&); + std::vector bincodeSerialize() const; + static Directive bincodeDeserialize(std::vector); + }; + + struct MemOp { + Program::Expression operation; + Program::Expression index; + Program::Expression value; + + friend bool operator==(const MemOp&, const MemOp&); + std::vector bincodeSerialize() const; + static MemOp bincodeDeserialize(std::vector); + }; + + struct Opcode { + + struct AssertZero { + Program::Expression value; + + friend bool operator==(const AssertZero&, const AssertZero&); + std::vector bincodeSerialize() const; + static AssertZero bincodeDeserialize(std::vector); + }; + + struct BlackBoxFuncCall { + Program::BlackBoxFuncCall value; + + friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); + std::vector bincodeSerialize() const; + static BlackBoxFuncCall bincodeDeserialize(std::vector); + }; + + struct Directive { + Program::Directive value; + + friend bool operator==(const Directive&, const Directive&); + std::vector bincodeSerialize() const; + static Directive bincodeDeserialize(std::vector); + }; + + struct MemoryOp { + Program::BlockId block_id; + Program::MemOp op; + std::optional predicate; + + friend bool operator==(const MemoryOp&, const MemoryOp&); + std::vector bincodeSerialize() const; + static MemoryOp bincodeDeserialize(std::vector); + }; + + struct MemoryInit { + Program::BlockId block_id; + std::vector init; + + friend bool operator==(const MemoryInit&, const MemoryInit&); + std::vector bincodeSerialize() const; + static MemoryInit bincodeDeserialize(std::vector); + }; + + struct BrilligCall { + uint32_t id; + std::vector inputs; + std::vector outputs; + std::optional predicate; + + friend bool operator==(const BrilligCall&, const BrilligCall&); + std::vector bincodeSerialize() const; + static BrilligCall bincodeDeserialize(std::vector); + }; + + struct Call { + uint32_t id; + std::vector inputs; + std::vector outputs; + std::optional predicate; + + friend bool operator==(const Call&, const Call&); + std::vector bincodeSerialize() const; + static Call bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const Opcode&, const Opcode&); + std::vector bincodeSerialize() const; + static Opcode bincodeDeserialize(std::vector); + }; + struct BinaryFieldOp { struct Add { @@ -946,153 +1074,6 @@ namespace Program { static BrilligOpcode bincodeDeserialize(std::vector); }; - struct BrilligOutputs { - - struct Simple { - 
Program::Witness value;
-
-            friend bool operator==(const Simple&, const Simple&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static Simple bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        struct Array {
-            std::vector<Program::Witness> value;
-
-            friend bool operator==(const Array&, const Array&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static Array bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        std::variant<Simple, Array> value;
-
-        friend bool operator==(const BrilligOutputs&, const BrilligOutputs&);
-        std::vector<uint8_t> bincodeSerialize() const;
-        static BrilligOutputs bincodeDeserialize(std::vector<uint8_t>);
-    };
-
-    struct Brillig {
-        std::vector<Program::BrilligInputs> inputs;
-        std::vector<Program::BrilligOutputs> outputs;
-        std::vector<Program::BrilligOpcode> bytecode;
-        std::optional<Program::Expression> predicate;
-
-        friend bool operator==(const Brillig&, const Brillig&);
-        std::vector<uint8_t> bincodeSerialize() const;
-        static Brillig bincodeDeserialize(std::vector<uint8_t>);
-    };
-
-    struct Directive {
-
-        struct ToLeRadix {
-            Program::Expression a;
-            std::vector<Program::Witness> b;
-            uint32_t radix;
-
-            friend bool operator==(const ToLeRadix&, const ToLeRadix&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static ToLeRadix bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        std::variant<ToLeRadix> value;
-
-        friend bool operator==(const Directive&, const Directive&);
-        std::vector<uint8_t> bincodeSerialize() const;
-        static Directive bincodeDeserialize(std::vector<uint8_t>);
-    };
-
-    struct MemOp {
-        Program::Expression operation;
-        Program::Expression index;
-        Program::Expression value;
-
-        friend bool operator==(const MemOp&, const MemOp&);
-        std::vector<uint8_t> bincodeSerialize() const;
-        static MemOp bincodeDeserialize(std::vector<uint8_t>);
-    };
-
-    struct Opcode {
-
-        struct AssertZero {
-            Program::Expression value;
-
-            friend bool operator==(const AssertZero&, const AssertZero&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static AssertZero bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        struct BlackBoxFuncCall {
-            Program::BlackBoxFuncCall value;
-
-            friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static BlackBoxFuncCall bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        struct Directive {
-            Program::Directive value;
-
-            friend bool operator==(const Directive&, const Directive&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static Directive bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        struct Brillig {
-            Program::Brillig value;
-
-            friend bool operator==(const Brillig&, const Brillig&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static Brillig bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        struct MemoryOp {
-            Program::BlockId block_id;
-            Program::MemOp op;
-            std::optional<Program::Expression> predicate;
-
-            friend bool operator==(const MemoryOp&, const MemoryOp&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static MemoryOp bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        struct MemoryInit {
-            Program::BlockId block_id;
-            std::vector<Program::Witness> init;
-
-            friend bool operator==(const MemoryInit&, const MemoryInit&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static MemoryInit bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        struct BrilligCall {
-            uint32_t id;
-            std::vector<Program::BrilligInputs> inputs;
-            std::vector<Program::BrilligOutputs> outputs;
-            std::optional<Program::Expression> predicate;
-
-            friend bool operator==(const BrilligCall&, const BrilligCall&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static BrilligCall bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        struct Call {
-            uint32_t id;
-            std::vector<Program::Witness> inputs;
-            std::vector<Program::Witness> outputs;
-            std::optional<Program::Expression> predicate;
-
-            friend bool operator==(const Call&, const Call&);
-            std::vector<uint8_t> bincodeSerialize() const;
-            static Call bincodeDeserialize(std::vector<uint8_t>);
-        };
-
-        std::variant<AssertZero, BlackBoxFuncCall, Directive, Brillig, MemoryOp, MemoryInit, BrilligCall, Call> value;
-
-        friend bool operator==(const Opcode&, const Opcode&);
-        std::vector<uint8_t> bincodeSerialize() const;
-        static Opcode bincodeDeserialize(std::vector<uint8_t>);
-    };
 
     struct ExpressionWidth {
 
         struct Unbounded {
@@ -4043,57 +4024,6 @@ Program::BlockId serde::Deserializable<Program::BlockId>::deserialize(Deserializ
     return obj;
 }
 
-namespace Program {
-
-    inline bool operator==(const Brillig &lhs, const Brillig &rhs) {
-        if (!(lhs.inputs == rhs.inputs)) { return false; }
-        if (!(lhs.outputs == rhs.outputs)) { return false; }
-        if (!(lhs.bytecode == rhs.bytecode)) { return false; }
-        if (!(lhs.predicate == rhs.predicate)) { return false; }
-        return true;
-    }
-
-    inline std::vector<uint8_t> Brillig::bincodeSerialize() const {
-        auto serializer = serde::BincodeSerializer();
-        serde::Serializable<Brillig>::serialize(*this, serializer);
-        return std::move(serializer).bytes();
-    }
-
-    inline Brillig Brillig::bincodeDeserialize(std::vector<uint8_t> input) {
-        auto deserializer = serde::BincodeDeserializer(input);
-        auto value = serde::Deserializable<Brillig>::deserialize(deserializer);
-        if (deserializer.get_buffer_offset() < input.size()) {
-            throw serde::deserialization_error("Some input bytes were not read");
-        }
-        return value;
-    }
-
-} // end of namespace Program
-
-template <>
-template <typename Serializer>
-void serde::Serializable<Program::Brillig>::serialize(const Program::Brillig &obj, Serializer &serializer) {
-    serializer.increase_container_depth();
-    serde::Serializable<decltype(obj.inputs)>::serialize(obj.inputs, serializer);
-    serde::Serializable<decltype(obj.outputs)>::serialize(obj.outputs, serializer);
-    serde::Serializable<decltype(obj.bytecode)>::serialize(obj.bytecode, serializer);
-    serde::Serializable<decltype(obj.predicate)>::serialize(obj.predicate, serializer);
-    serializer.decrease_container_depth();
-}
-
-template <>
-template <typename Deserializer>
-Program::Brillig serde::Deserializable<Program::Brillig>::deserialize(Deserializer &deserializer) {
-    deserializer.increase_container_depth();
-    Program::Brillig obj;
-    obj.inputs = serde::Deserializable<decltype(obj.inputs)>::deserialize(deserializer);
-    obj.outputs = serde::Deserializable<decltype(obj.outputs)>::deserialize(deserializer);
-    obj.bytecode = serde::Deserializable<decltype(obj.bytecode)>::deserialize(deserializer);
-    obj.predicate = serde::Deserializable<decltype(obj.predicate)>::deserialize(deserializer);
-    deserializer.decrease_container_depth();
-    return obj;
-}
-
 namespace Program {
 
     inline bool operator==(const BrilligBytecode &lhs, const BrilligBytecode &rhs) {
@@ -6066,44 +5996,6 @@ Program::Opcode::Directive serde::Deserializable<Program::Opcode::Directive>::de
     return obj;
 }
 
-namespace Program {
-
-    inline bool operator==(const Opcode::Brillig &lhs, const Opcode::Brillig &rhs) {
-        if (!(lhs.value == rhs.value)) { return false; }
-        return true;
-    }
-
-    inline std::vector<uint8_t> Opcode::Brillig::bincodeSerialize() const {
-        auto serializer = serde::BincodeSerializer();
-        serde::Serializable<Opcode::Brillig>::serialize(*this, serializer);
-        return std::move(serializer).bytes();
-    }
-
-    inline Opcode::Brillig Opcode::Brillig::bincodeDeserialize(std::vector<uint8_t> input) {
-        auto deserializer = serde::BincodeDeserializer(input);
-        auto value = serde::Deserializable<Opcode::Brillig>::deserialize(deserializer);
-        if (deserializer.get_buffer_offset() < input.size()) {
-            throw serde::deserialization_error("Some input bytes were not read");
-        }
-        return value;
-    }
-
-} // end of namespace Program
-
-template <>
-template <typename Serializer>
-void serde::Serializable<Program::Opcode::Brillig>::serialize(const Program::Opcode::Brillig &obj, Serializer &serializer) {
-    serde::Serializable<decltype(obj.value)>::serialize(obj.value, serializer);
-}
-
-template <>
-template <typename Deserializer>
-Program::Opcode::Brillig serde::Deserializable<Program::Opcode::Brillig>::deserialize(Deserializer &deserializer) {
-    Program::Opcode::Brillig obj;
-    obj.value = serde::Deserializable<decltype(obj.value)>::deserialize(deserializer);
-    return obj;
-}
-
 namespace Program {
 
     inline bool operator==(const Opcode::MemoryOp &lhs, const Opcode::MemoryOp &rhs) {
diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs
index 7f87aabf9d5..ecf6f7a9761 100644
--- a/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs
+++ b/noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs
@@ -20,16 +20,6 @@ pub enum BrilligOutputs {
     Array(Vec<Witness>),
 }
 
-#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
-pub struct Brillig {
-    pub inputs: Vec<BrilligInputs>,
-    pub outputs: Vec<BrilligOutputs>,
-    /// The Brillig VM bytecode to be executed by this ACIR opcode.
-    pub bytecode: Vec<brillig::Opcode>,
-    /// Predicate of the Brillig execution - indicates if it should be skipped
-    pub predicate: Option<Expression>,
-}
-
 /// This is purely a wrapper struct around a list of Brillig opcode's which represents
 /// a full Brillig function to be executed by the Brillig VM.
 /// This is stored separately on a program and accessed through a [BrilligPointer].
diff --git a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs
index b0b8e286e0c..7db317c41ab 100644
--- a/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs
+++ b/noir/noir-repo/acvm-repo/acir/src/circuit/opcodes.rs
@@ -1,5 +1,5 @@
 use super::{
-    brillig::{Brillig, BrilligInputs, BrilligOutputs},
+    brillig::{BrilligInputs, BrilligOutputs},
     directives::Directive,
 };
 use crate::native_types::{Expression, Witness};
@@ -20,7 +20,6 @@ pub enum Opcode {
     /// Often used for exposing more efficient implementations of SNARK-unfriendly computations.
     BlackBoxFuncCall(BlackBoxFuncCall),
     Directive(Directive),
-    Brillig(Brillig),
     /// Atomic operation on a block of memory
     MemoryOp {
         block_id: BlockId,
@@ -88,12 +87,6 @@ impl std::fmt::Display for Opcode {
                     b.last().unwrap().witness_index(),
                 )
             }
-            Opcode::Brillig(brillig) => {
-                write!(f, "BRILLIG: ")?;
-                writeln!(f, "inputs: {:?}", brillig.inputs)?;
-                writeln!(f, "outputs: {:?}", brillig.outputs)?;
-                writeln!(f, "{:?}", brillig.bytecode)
-            }
             Opcode::MemoryOp { block_id, op, predicate } => {
                 write!(f, "MEM ")?;
                 if let Some(pred) = predicate {
diff --git a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs
index fb924a7437d..c5912b61cf1 100644
--- a/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs
+++ b/noir/noir-repo/acvm-repo/acir/tests/test_program_serialization.rs
@@ -13,7 +13,7 @@ use std::collections::BTreeSet;
 
 use acir::{
     circuit::{
-        brillig::{Brillig, BrilligInputs, BrilligOutputs},
+        brillig::{BrilligBytecode, BrilligInputs, BrilligOutputs},
         opcodes::{BlackBoxFuncCall, BlockId, FunctionInput, MemOp},
         Circuit, Opcode, Program, PublicInputs,
     },
@@ -176,14 +176,7 @@ fn simple_brillig_foreign_call() {
     let w_input = Witness(1);
     let w_inverted = Witness(2);
 
-    let brillig_data = Brillig {
-        inputs: vec![
-            BrilligInputs::Single(w_input.into()), // Input Register 0,
-        ],
-        // This tells the BrilligSolver which witnesses its output values correspond to
-        outputs: vec![
-            BrilligOutputs::Simple(w_inverted), // Output Register 1
-        ],
+    let brillig_bytecode = BrilligBytecode {
         bytecode: vec![
             brillig::Opcode::CalldataCopy {
                 destination_address: MemoryAddress(0),
@@ -199,27 +192,38 @@
             },
             brillig::Opcode::Stop { return_data_offset: 0, return_data_size: 1 },
         ],
-        predicate: None,
     };
 
-    let opcodes = vec![Opcode::Brillig(brillig_data)];
+    let opcodes = vec![Opcode::BrilligCall {
+        id: 0,
+        inputs: vec![
+            BrilligInputs::Single(w_input.into()), // Input Register 0,
+        ],
+
// This tells the BrilligSolver which witnesses its output values correspond to + outputs: vec![ + BrilligOutputs::Simple(w_inverted), // Output Register 1 + ], + predicate: None, + }]; + let circuit = Circuit { current_witness_index: 8, opcodes, private_parameters: BTreeSet::from([Witness(1), Witness(2)]), ..Circuit::default() }; - let program = Program { functions: vec![circuit], unconstrained_functions: vec![] }; + let program = + Program { functions: vec![circuit], unconstrained_functions: vec![brillig_bytecode] }; let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 144, 61, 10, 192, 32, 12, 133, 19, 11, 165, 116, - 235, 77, 236, 13, 122, 153, 14, 93, 58, 136, 120, 124, 241, 47, 129, 12, 42, 130, 126, 16, - 18, 146, 16, 222, 11, 66, 225, 136, 129, 84, 111, 162, 150, 112, 239, 161, 172, 231, 184, - 113, 221, 45, 45, 245, 42, 242, 144, 216, 43, 250, 153, 83, 204, 191, 223, 189, 198, 246, - 92, 39, 60, 244, 63, 195, 59, 87, 99, 150, 165, 113, 83, 193, 0, 1, 19, 247, 29, 5, 160, 1, - 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 144, 193, 10, 192, 32, 8, 134, 117, 99, 99, 236, + 182, 55, 105, 111, 176, 151, 217, 161, 75, 135, 136, 30, 63, 42, 82, 144, 8, 47, 245, 65, + 252, 230, 47, 162, 34, 52, 174, 242, 144, 226, 131, 148, 255, 18, 206, 125, 164, 102, 142, + 23, 215, 245, 50, 114, 222, 173, 15, 80, 38, 65, 217, 108, 39, 61, 7, 30, 115, 11, 223, + 186, 248, 251, 160, 221, 170, 146, 64, 191, 39, 215, 60, 3, 47, 3, 99, 171, 188, 84, 164, + 1, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -239,27 +243,7 @@ fn complex_brillig_foreign_call() { let a_plus_b_plus_c = Witness(7); let a_plus_b_plus_c_times_2 = Witness(8); - let brillig_data = Brillig { - inputs: vec![ - // Input 0,1,2 - BrilligInputs::Array(vec![ - Expression::from(a), - Expression::from(b), - Expression::from(c), - ]), - // Input 3 - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(fe_1, a), (fe_1, b), (fe_1, c)], - q_c: fe_0, - }), - ], - // This tells the BrilligSolver which witnesses its output values correspond to - outputs: vec![ - BrilligOutputs::Array(vec![a_times_2, b_times_3, c_times_4]), // Output 0,1,2 - BrilligOutputs::Simple(a_plus_b_plus_c), // Output 3 - BrilligOutputs::Simple(a_plus_b_plus_c_times_2), // Output 4 - ], + let brillig_bytecode = BrilligBytecode { bytecode: vec![ brillig::Opcode::CalldataCopy { destination_address: MemoryAddress(32), @@ -300,30 +284,54 @@ fn complex_brillig_foreign_call() { }, brillig::Opcode::Stop { return_data_offset: 32, return_data_size: 5 }, ], - predicate: None, }; - let opcodes = vec![Opcode::Brillig(brillig_data)]; + let opcodes = vec![Opcode::BrilligCall { + id: 0, + inputs: vec![ + // Input 0,1,2 + BrilligInputs::Array(vec![ + Expression::from(a), + Expression::from(b), + Expression::from(c), + ]), + // Input 3 + BrilligInputs::Single(Expression { + mul_terms: vec![], + linear_combinations: vec![(fe_1, a), (fe_1, b), (fe_1, c)], + q_c: fe_0, + }), + ], + // This tells the BrilligSolver which witnesses its output values correspond to + outputs: vec![ + BrilligOutputs::Array(vec![a_times_2, b_times_3, c_times_4]), // Output 0,1,2 + BrilligOutputs::Simple(a_plus_b_plus_c), // Output 3 + BrilligOutputs::Simple(a_plus_b_plus_c_times_2), // Output 4 + ], + predicate: None, + }]; + let circuit = Circuit { current_witness_index: 8, opcodes, private_parameters: BTreeSet::from([Witness(1), Witness(2), Witness(3)]), ..Circuit::default() }; - let program = 
Program { functions: vec![circuit], unconstrained_functions: vec![] }; + let program = + Program { functions: vec![circuit], unconstrained_functions: vec![brillig_bytecode] }; let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 84, 75, 10, 132, 48, 12, 77, 218, 209, 145, 217, - 205, 13, 6, 198, 3, 84, 79, 224, 93, 196, 157, 162, 75, 79, 47, 22, 124, 197, 16, 186, 17, - 43, 104, 32, 36, 109, 126, 143, 36, 45, 211, 70, 133, 103, 134, 110, 61, 27, 232, 140, 179, - 164, 224, 215, 64, 186, 115, 84, 113, 186, 92, 238, 42, 140, 230, 1, 24, 237, 5, 24, 195, - 62, 220, 116, 222, 41, 231, 146, 180, 127, 54, 242, 126, 94, 158, 51, 207, 57, 206, 111, - 200, 2, 247, 4, 219, 79, 245, 157, 132, 31, 137, 89, 52, 73, 176, 214, 46, 167, 125, 23, - 89, 213, 254, 8, 156, 237, 56, 76, 125, 55, 91, 229, 170, 161, 254, 133, 94, 42, 59, 171, - 184, 69, 197, 46, 66, 202, 47, 40, 86, 39, 220, 155, 3, 185, 191, 180, 183, 55, 163, 72, - 98, 70, 66, 221, 251, 40, 173, 255, 35, 68, 62, 61, 5, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 84, 75, 10, 131, 64, 12, 77, 102, 90, 43, 221, 245, + 6, 133, 246, 0, 211, 158, 192, 187, 136, 59, 69, 151, 158, 94, 116, 48, 131, 241, 233, 70, + 28, 65, 3, 195, 155, 79, 62, 47, 9, 25, 166, 81, 210, 97, 177, 236, 239, 130, 70, 208, 223, + 91, 154, 75, 208, 205, 4, 221, 62, 249, 113, 60, 95, 238, 40, 142, 230, 2, 28, 237, 1, 28, + 73, 245, 255, 132, 253, 142, 217, 151, 168, 245, 179, 43, 243, 115, 163, 113, 190, 18, 57, + 63, 4, 83, 44, 180, 55, 50, 180, 28, 188, 153, 224, 196, 122, 175, 111, 112, 68, 24, 65, + 50, 204, 162, 100, 249, 119, 137, 226, 193, 16, 251, 169, 50, 204, 235, 170, 41, 139, 214, + 130, 42, 82, 253, 168, 253, 23, 222, 25, 236, 58, 176, 237, 20, 234, 207, 107, 45, 78, 184, + 55, 27, 124, 191, 104, 42, 111, 40, 121, 15, 94, 163, 77, 128, 65, 5, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -357,11 +365,11 @@ fn memory_op_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 81, 57, 14, 0, 32, 8, 147, 195, 255, 224, 15, 252, - 255, 171, 212, 200, 208, 129, 77, 24, 108, 66, 90, 150, 166, 20, 106, 23, 125, 143, 128, - 62, 96, 103, 114, 173, 45, 198, 116, 182, 55, 140, 106, 95, 74, 246, 149, 60, 47, 171, 46, - 215, 126, 43, 87, 179, 111, 23, 8, 202, 176, 99, 248, 240, 9, 11, 137, 33, 212, 110, 35, 3, - 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 203, 180, 255, 216, 15, 250, + 255, 171, 10, 154, 16, 210, 45, 61, 52, 144, 13, 132, 49, 135, 84, 54, 218, 26, 134, 22, + 112, 5, 19, 180, 237, 61, 6, 88, 223, 208, 179, 125, 41, 216, 151, 227, 188, 52, 187, 92, + 253, 173, 92, 137, 190, 157, 143, 160, 254, 155, 45, 188, 148, 11, 38, 213, 237, 188, 16, + 35, 3, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -460,15 +468,15 @@ fn nested_acir_call_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 205, 146, 65, 10, 3, 33, 12, 69, 163, 46, 230, 58, 137, - 209, 49, 238, 122, 149, 74, 157, 251, 31, 161, 83, 154, 161, 86, 132, 89, 212, 194, 124, - 248, 24, 36, 132, 228, 241, 29, 188, 229, 212, 47, 45, 187, 205, 110, 11, 31, 25, 53, 28, - 255, 103, 77, 14, 58, 29, 141, 55, 125, 241, 55, 145, 109, 102, 49, 174, 33, 212, 228, 43, - 49, 221, 209, 231, 34, 17, 67, 44, 171, 144, 80, 148, 248, 240, 194, 92, 37, 72, 202, 37, - 39, 204, 20, 184, 210, 22, 51, 111, 58, 204, 205, 219, 11, 161, 129, 
208, 214, 6, 6, 114, - 29, 193, 127, 193, 130, 137, 176, 236, 188, 189, 252, 162, 183, 218, 230, 238, 97, 138, - 250, 152, 245, 245, 87, 220, 12, 140, 113, 95, 153, 170, 129, 185, 17, 60, 3, 54, 212, 19, - 104, 145, 195, 151, 14, 4, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 205, 146, 97, 10, 195, 32, 12, 133, 163, 66, 207, 147, + 24, 173, 241, 223, 174, 50, 153, 189, 255, 17, 214, 177, 148, 57, 17, 250, 99, 14, 250, + 224, 97, 144, 16, 146, 143, 231, 224, 45, 167, 126, 105, 217, 109, 118, 91, 248, 200, 168, + 225, 248, 63, 107, 114, 208, 233, 104, 188, 233, 139, 191, 137, 108, 51, 139, 113, 13, 161, + 38, 95, 137, 233, 142, 62, 23, 137, 24, 98, 89, 133, 132, 162, 196, 135, 23, 230, 42, 65, + 82, 46, 57, 97, 166, 192, 149, 182, 152, 121, 211, 97, 110, 222, 94, 8, 13, 132, 182, 54, + 48, 144, 235, 8, 254, 11, 22, 76, 132, 101, 231, 237, 229, 23, 189, 213, 54, 119, 15, 83, + 212, 199, 172, 175, 191, 226, 102, 96, 140, 251, 202, 84, 13, 204, 141, 224, 25, 176, 161, + 158, 53, 121, 144, 73, 14, 4, 0, 0, ]; assert_eq!(bytes, expected_serialization); } diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs index 04d3f99a408..dfe348d4ff5 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs @@ -32,7 +32,7 @@ pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, Vec) { // by applying the modifications done to the circuit opcodes and also to the opcode_positions (delete and insert) let acir_opcode_positions = (0..acir.opcodes.len()).collect(); - if acir.opcodes.len() == 1 && matches!(acir.opcodes[0], Opcode::Brillig(_)) { + if acir.opcodes.len() == 1 && matches!(acir.opcodes[0], Opcode::BrilligCall { .. }) { info!("Program is fully unconstrained, skipping optimization pass"); return (acir, acir_opcode_positions); } diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs index d13fac1672a..0099519e4b6 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs @@ -128,20 +128,6 @@ pub(super) fn transform_internal( new_acir_opcode_positions.push(acir_opcode_positions[index]); transformed_opcodes.push(opcode); } - Opcode::Brillig(ref brillig) => { - for output in &brillig.outputs { - match output { - BrilligOutputs::Simple(w) => transformer.mark_solvable(*w), - BrilligOutputs::Array(v) => { - for witness in v { - transformer.mark_solvable(*witness); - } - } - } - } - new_acir_opcode_positions.push(acir_opcode_positions[index]); - transformed_opcodes.push(opcode); - } Opcode::BrilligCall { ref outputs, .. 
} => {
                 for output in outputs {
                     match output {
diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs
index 10178465d58..b0a79c50aa0 100644
--- a/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs
+++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/brillig.rs
@@ -3,7 +3,7 @@ use std::collections::HashMap;
 use acir::{
     brillig::{ForeignCallParam, ForeignCallResult, Opcode as BrilligOpcode},
     circuit::{
-        brillig::{Brillig, BrilligInputs, BrilligOutputs},
+        brillig::{BrilligInputs, BrilligOutputs},
         opcodes::BlockId,
         OpcodeLocation,
     },
@@ -50,26 +50,6 @@ impl<'b, B: BlackBoxFunctionSolver> BrilligSolver<'b, B> {
         Ok(())
     }
 
-    // TODO: Delete this old method once `Brillig` is deleted
-    /// Constructs a solver for a Brillig block given the bytecode and initial
-    /// witness.
-    pub(crate) fn new(
-        initial_witness: &WitnessMap,
-        memory: &HashMap<BlockId, MemoryOpSolver>,
-        brillig: &'b Brillig,
-        bb_solver: &'b B,
-        acir_index: usize,
-    ) -> Result<Self, OpcodeResolutionError> {
-        let vm = Self::setup_brillig_vm(
-            initial_witness,
-            memory,
-            &brillig.inputs,
-            &brillig.bytecode,
-            bb_solver,
-        )?;
-        Ok(Self { vm, acir_index })
-    }
-
     /// Constructs a solver for a Brillig block given the bytecode and initial
     /// witness.
     pub(crate) fn new_call(
diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs
index 3d3c52c661b..6a1bebf4ee8 100644
--- a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs
+++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs
@@ -329,10 +329,6 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> {
                 let solver = self.block_solvers.entry(*block_id).or_default();
                 solver.solve_memory_op(op, &mut self.witness_map, predicate)
             }
-            Opcode::Brillig(_) => match self.solve_brillig_opcode() {
-                Ok(Some(foreign_call)) => return self.wait_for_foreign_call(foreign_call),
-                res => res.map(|_| ()),
-            },
             Opcode::BrilligCall { .. } => match self.solve_brillig_call_opcode() {
                 Ok(Some(foreign_call)) => return self.wait_for_foreign_call(foreign_call),
                 res => res.map(|_| ()),
@@ -382,48 +378,6 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> {
         }
     }
 
-    fn solve_brillig_opcode(
-        &mut self,
-    ) -> Result<Option<ForeignCallWaitInfo>, OpcodeResolutionError> {
-        let Opcode::Brillig(brillig) = &self.opcodes[self.instruction_pointer] else {
-            unreachable!("Not executing a Brillig opcode");
-        };
-
-        let witness = &mut self.witness_map;
-        if is_predicate_false(witness, &brillig.predicate)? {
-            return BrilligSolver::<B>::zero_out_brillig_outputs(witness, &brillig.outputs)
-                .map(|_| None);
-        }
-
-        // If we're resuming execution after resolving a foreign call then
-        // there will be a cached `BrilligSolver` to avoid recomputation.
-        let mut solver: BrilligSolver<'_, B> = match self.brillig_solver.take() {
-            Some(solver) => solver,
-            None => BrilligSolver::new(
-                witness,
-                &self.block_solvers,
-                brillig,
-                self.backend,
-                self.instruction_pointer,
-            )?,
-        };
-        match solver.solve()? {
-            BrilligSolverStatus::ForeignCallWait(foreign_call) => {
-                // Cache the current state of the solver
-                self.brillig_solver = Some(solver);
-                Ok(Some(foreign_call))
-            }
-            BrilligSolverStatus::InProgress => {
-                unreachable!("Brillig solver still in progress")
-            }
-            BrilligSolverStatus::Finished => {
-                // Write execution outputs
-                solver.finalize(witness, &brillig.outputs)?;
-                Ok(None)
-            }
-        }
-    }
-
     fn solve_brillig_call_opcode(
         &mut self,
     ) -> Result<Option<ForeignCallWaitInfo>, OpcodeResolutionError> {
diff --git a/noir/noir-repo/acvm-repo/acvm/tests/solver.rs b/noir/noir-repo/acvm-repo/acvm/tests/solver.rs
index f009e2c05b8..93985f97f40 100644
--- a/noir/noir-repo/acvm-repo/acvm/tests/solver.rs
+++ b/noir/noir-repo/acvm-repo/acvm/tests/solver.rs
@@ -3,7 +3,7 @@ use std::collections::BTreeMap;
 use acir::{
     brillig::{BinaryFieldOp, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray},
     circuit::{
-        brillig::{Brillig, BrilligInputs, BrilligOutputs},
+        brillig::{BrilligBytecode, BrilligInputs, BrilligOutputs},
         opcodes::{BlockId, MemOp},
         Opcode, OpcodeLocation,
     },
@@ -43,44 +43,26 @@ fn inversion_brillig_oracle_equivalence() {
         destination: MemoryAddress::from(2),
     };
 
-    let brillig_data = Brillig {
-        inputs: vec![
-            BrilligInputs::Single(Expression {
-                // Input Register 0
-                mul_terms: vec![],
-                linear_combinations: vec![(fe_1, w_x), (fe_1, w_y)],
-                q_c: fe_0,
-            }),
-            BrilligInputs::Single(Expression::default()), // Input Register 1
-        ],
-        // This tells the BrilligSolver which witnesses its output values correspond to
-        outputs: vec![
-            BrilligOutputs::Simple(w_x_plus_y), // Output Register 0 - from input
-            BrilligOutputs::Simple(w_oracle), // Output Register 1
-            BrilligOutputs::Simple(w_equal_res), // Output Register 2
-        ],
-        bytecode: vec![
-            BrilligOpcode::CalldataCopy {
-                destination_address: MemoryAddress(0),
-                size: 2,
-                offset: 0,
-            },
-            equal_opcode,
-            // Oracles are named 'foreign calls' in brillig
-            BrilligOpcode::ForeignCall {
-                function: "invert".into(),
-                destinations: vec![ValueOrArray::MemoryAddress(MemoryAddress::from(1))],
-                destination_value_types: vec![HeapValueType::field()],
-                inputs: vec![ValueOrArray::MemoryAddress(MemoryAddress::from(0))],
-                input_value_types: vec![HeapValueType::field()],
-            },
-            BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 3 },
-        ],
-        predicate: None,
-    };
-
     let opcodes = vec![
-        Opcode::Brillig(brillig_data),
+        Opcode::BrilligCall {
+            id: 0,
+            inputs: vec![
+                BrilligInputs::Single(Expression {
+                    // Input Register 0
+                    mul_terms: vec![],
+                    linear_combinations: vec![(fe_1, w_x), (fe_1, w_y)],
+                    q_c: fe_0,
+                }),
+                BrilligInputs::Single(Expression::default()), // Input Register 1
+            ],
+            // This tells the BrilligSolver which witnesses its output values correspond to
+            outputs: vec![
+                BrilligOutputs::Simple(w_x_plus_y), // Output Register 0 - from input
+                BrilligOutputs::Simple(w_oracle), // Output Register 1
+                BrilligOutputs::Simple(w_equal_res), // Output Register 2
+            ],
+            predicate: None,
+        },
         Opcode::AssertZero(Expression {
             mul_terms: vec![],
             linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)],
@@ -99,12 +81,32 @@ fn inversion_brillig_oracle_equivalence() {
         }),
     ];
 
+    let brillig_bytecode = BrilligBytecode {
+        bytecode: vec![
+            BrilligOpcode::CalldataCopy {
+                destination_address: MemoryAddress(0),
+                size: 2,
+                offset: 0,
+            },
+            equal_opcode,
+            // Oracles are named 'foreign calls' in brillig
+            BrilligOpcode::ForeignCall {
+                function: "invert".into(),
+                destinations: vec![ValueOrArray::MemoryAddress(MemoryAddress::from(1))],
+                destination_value_types:
vec![HeapValueType::field()], + inputs: vec![ValueOrArray::MemoryAddress(MemoryAddress::from(0))], + input_value_types: vec![HeapValueType::field()], + }, + BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 3 }, + ], + }; + let witness_assignments = BTreeMap::from([ (Witness(1), FieldElement::from(2u128)), (Witness(2), FieldElement::from(3u128)), ]) .into(); - let unconstrained_functions = vec![]; + let unconstrained_functions = vec![brillig_bytecode]; let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, witness_assignments, &unconstrained_functions); // use the partial witness generation solver with our acir program @@ -165,29 +167,52 @@ fn double_inversion_brillig_oracle() { destination: MemoryAddress::from(4), }; - let brillig_data = Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - // Input Register 0 - mul_terms: vec![], - linear_combinations: vec![(fe_1, w_x), (fe_1, w_y)], - q_c: fe_0, - }), - BrilligInputs::Single(Expression::default()), // Input Register 1 - BrilligInputs::Single(Expression { - // Input Register 2 - mul_terms: vec![], - linear_combinations: vec![(fe_1, w_i), (fe_1, w_j)], - q_c: fe_0, - }), - ], - outputs: vec![ - BrilligOutputs::Simple(w_x_plus_y), // Output Register 0 - from input - BrilligOutputs::Simple(w_oracle), // Output Register 1 - BrilligOutputs::Simple(w_i_plus_j), // Output Register 2 - from input - BrilligOutputs::Simple(w_ij_oracle), // Output Register 3 - BrilligOutputs::Simple(w_equal_res), // Output Register 4 - ], + let opcodes = vec![ + Opcode::BrilligCall { + id: 0, + inputs: vec![ + BrilligInputs::Single(Expression { + // Input Register 0 + mul_terms: vec![], + linear_combinations: vec![(fe_1, w_x), (fe_1, w_y)], + q_c: fe_0, + }), + BrilligInputs::Single(Expression::default()), // Input Register 1 + BrilligInputs::Single(Expression { + // Input Register 2 + mul_terms: vec![], + linear_combinations: vec![(fe_1, w_i), (fe_1, w_j)], + q_c: fe_0, + }), + ], + outputs: vec![ + BrilligOutputs::Simple(w_x_plus_y), // Output Register 0 - from input + BrilligOutputs::Simple(w_oracle), // Output Register 1 + BrilligOutputs::Simple(w_i_plus_j), // Output Register 2 - from input + BrilligOutputs::Simple(w_ij_oracle), // Output Register 3 + BrilligOutputs::Simple(w_equal_res), // Output Register 4 + ], + predicate: None, + }, + Opcode::AssertZero(Expression { + mul_terms: vec![], + linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], + q_c: fe_0, + }), + // Opcode::Directive(Directive::Invert { x: w_z, result: w_z_inverse }), + Opcode::AssertZero(Expression { + mul_terms: vec![(fe_1, w_z, w_z_inverse)], + linear_combinations: vec![], + q_c: -fe_1, + }), + Opcode::AssertZero(Expression { + mul_terms: vec![], + linear_combinations: vec![(-fe_1, w_oracle), (fe_1, w_z_inverse)], + q_c: fe_0, + }), + ]; + + let brillig_bytecode = BrilligBytecode { bytecode: vec![ BrilligOpcode::CalldataCopy { destination_address: MemoryAddress(0), @@ -212,29 +237,8 @@ fn double_inversion_brillig_oracle() { }, BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 5 }, ], - predicate: None, }; - let opcodes = vec![ - Opcode::Brillig(brillig_data), - Opcode::AssertZero(Expression { - mul_terms: vec![], - linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], - q_c: fe_0, - }), - // Opcode::Directive(Directive::Invert { x: w_z, result: w_z_inverse }), - Opcode::AssertZero(Expression { - mul_terms: vec![(fe_1, w_z, w_z_inverse)], - linear_combinations: vec![], - q_c: -fe_1, - }), - Opcode::AssertZero(Expression { 
- mul_terms: vec![], - linear_combinations: vec![(-fe_1, w_oracle), (fe_1, w_z_inverse)], - q_c: fe_0, - }), - ]; - let witness_assignments = BTreeMap::from([ (Witness(1), FieldElement::from(2u128)), (Witness(2), FieldElement::from(3u128)), @@ -242,7 +246,7 @@ fn double_inversion_brillig_oracle() { (Witness(9), FieldElement::from(10u128)), ]) .into(); - let unconstrained_functions = vec![]; + let unconstrained_functions = vec![brillig_bytecode]; let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, witness_assignments, &unconstrained_functions); @@ -311,18 +315,7 @@ fn oracle_dependent_execution() { let w_x_inv = Witness(3); let w_y_inv = Witness(4); - let brillig_data = Brillig { - inputs: vec![ - BrilligInputs::Single(w_x.into()), // Input Register 0 - BrilligInputs::Single(Expression::default()), // Input Register 1 - BrilligInputs::Single(w_y.into()), // Input Register 2, - ], - outputs: vec![ - BrilligOutputs::Simple(w_x), // Output Register 0 - from input - BrilligOutputs::Simple(w_y_inv), // Output Register 1 - BrilligOutputs::Simple(w_y), // Output Register 2 - from input - BrilligOutputs::Simple(w_y_inv), // Output Register 3 - ], + let brillig_bytecode = BrilligBytecode { bytecode: vec![ BrilligOpcode::CalldataCopy { destination_address: MemoryAddress(0), @@ -346,7 +339,6 @@ fn oracle_dependent_execution() { }, BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 4 }, ], - predicate: None, }; // This equality check can be executed immediately before resolving any foreign calls. @@ -366,13 +358,27 @@ fn oracle_dependent_execution() { let opcodes = vec![ Opcode::AssertZero(equality_check), - Opcode::Brillig(brillig_data), + Opcode::BrilligCall { + id: 0, + inputs: vec![ + BrilligInputs::Single(w_x.into()), // Input Register 0 + BrilligInputs::Single(Expression::default()), // Input Register 1 + BrilligInputs::Single(w_y.into()), // Input Register 2, + ], + outputs: vec![ + BrilligOutputs::Simple(w_x), // Output Register 0 - from input + BrilligOutputs::Simple(w_y_inv), // Output Register 1 + BrilligOutputs::Simple(w_y), // Output Register 2 - from input + BrilligOutputs::Simple(w_y_inv), // Output Register 3 + ], + predicate: None, + }, Opcode::AssertZero(inverse_equality_check), ]; let witness_assignments = BTreeMap::from([(w_x, FieldElement::from(2u128)), (w_y, FieldElement::from(2u128))]).into(); - let unconstrained_functions = vec![]; + let unconstrained_functions = vec![brillig_bytecode]; let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, witness_assignments, &unconstrained_functions); @@ -436,21 +442,7 @@ fn brillig_oracle_predicate() { destination: MemoryAddress::from(2), }; - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(fe_1, w_x), (fe_1, w_y)], - q_c: fe_0, - }), - BrilligInputs::Single(Expression::default()), - ], - outputs: vec![ - BrilligOutputs::Simple(w_x_plus_y), - BrilligOutputs::Simple(w_oracle), - BrilligOutputs::Simple(w_equal_res), - BrilligOutputs::Simple(w_lt_res), - ], + let brillig_bytecode = BrilligBytecode { bytecode: vec![ BrilligOpcode::CalldataCopy { destination_address: MemoryAddress(0), @@ -467,17 +459,33 @@ fn brillig_oracle_predicate() { input_value_types: vec![HeapValueType::field()], }, ], - predicate: Some(Expression::default()), - }); + }; - let opcodes = vec![brillig_opcode]; + let opcodes = vec![Opcode::BrilligCall { + id: 0, + inputs: vec![ + BrilligInputs::Single(Expression { + mul_terms: vec![], + 
linear_combinations: vec![(fe_1, w_x), (fe_1, w_y)], + q_c: fe_0, + }), + BrilligInputs::Single(Expression::default()), + ], + outputs: vec![ + BrilligOutputs::Simple(w_x_plus_y), + BrilligOutputs::Simple(w_oracle), + BrilligOutputs::Simple(w_equal_res), + BrilligOutputs::Simple(w_lt_res), + ], + predicate: Some(Expression::default()), + }]; let witness_assignments = BTreeMap::from([ (Witness(1), FieldElement::from(2u128)), (Witness(2), FieldElement::from(3u128)), ]) .into(); - let unconstrained_functions = vec![]; + let unconstrained_functions = vec![brillig_bytecode]; let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, witness_assignments, &unconstrained_functions); let solver_status = acvm.solve(); @@ -557,23 +565,9 @@ fn unsatisfied_opcode_resolved_brillig() { let trap_opcode = BrilligOpcode::Trap { revert_data_offset: 0, revert_data_size: 0 }; let stop_opcode = BrilligOpcode::Stop { return_data_offset: 0, return_data_size: 0 }; - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(fe_1, w_x)], - q_c: fe_0, - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(fe_1, w_y)], - q_c: fe_0, - }), - ], - outputs: vec![BrilligOutputs::Simple(w_result)], + let brillig_bytecode = BrilligBytecode { bytecode: vec![calldata_copy_opcode, equal_opcode, jmp_if_opcode, trap_opcode, stop_opcode], - predicate: Some(Expression::one()), - }); + }; let opcode_a = Expression { mul_terms: vec![], @@ -595,8 +589,27 @@ fn unsatisfied_opcode_resolved_brillig() { values.insert(w_y, FieldElement::from(1_i128)); values.insert(w_result, FieldElement::from(0_i128)); - let opcodes = vec![brillig_opcode, Opcode::AssertZero(opcode_a)]; - let unconstrained_functions = vec![]; + let opcodes = vec![ + Opcode::BrilligCall { + id: 0, + inputs: vec![ + BrilligInputs::Single(Expression { + mul_terms: vec![], + linear_combinations: vec![(fe_1, w_x)], + q_c: fe_0, + }), + BrilligInputs::Single(Expression { + mul_terms: vec![], + linear_combinations: vec![(fe_1, w_y)], + q_c: fe_0, + }), + ], + outputs: vec![BrilligOutputs::Simple(w_result)], + predicate: Some(Expression::one()), + }, + Opcode::AssertZero(opcode_a), + ]; + let unconstrained_functions = vec![brillig_bytecode]; let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, values, &unconstrained_functions); let solver_status = acvm.solve(); assert_eq!( diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts index 722bae8e015..0d6fab0e1f3 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts @@ -2,13 +2,13 @@ import { WitnessMap } from '@noir-lang/acvm_js'; // See `complex_brillig_foreign_call` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 84, 75, 10, 132, 48, 12, 77, 218, 209, 145, 217, 205, 13, 6, 198, 3, 84, 79, - 224, 93, 196, 157, 162, 75, 79, 47, 22, 124, 197, 16, 186, 17, 43, 104, 32, 36, 109, 126, 143, 36, 45, 211, 70, 133, - 103, 134, 110, 61, 27, 232, 140, 179, 164, 224, 215, 64, 186, 115, 84, 113, 186, 92, 238, 42, 140, 230, 1, 24, 237, 5, - 24, 195, 62, 220, 116, 222, 41, 231, 146, 180, 127, 54, 242, 126, 94, 158, 51, 207, 57, 206, 111, 200, 2, 247, 4, 219, - 79, 245, 157, 132, 31, 137, 89, 52, 73, 176, 214, 46, 167, 125, 23, 89, 213, 254, 8, 156, 237, 56, 76, 125, 55, 91, - 229, 170, 161, 254, 133, 94, 42, 59, 171, 184, 69, 197, 46, 66, 202, 47, 40, 86, 39, 220, 155, 3, 185, 191, 180, 183, - 55, 163, 72, 98, 70, 66, 221, 251, 40, 173, 255, 35, 68, 62, 61, 5, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 84, 75, 10, 131, 64, 12, 77, 102, 90, 43, 221, 245, 6, 133, 246, 0, 211, 158, + 192, 187, 136, 59, 69, 151, 158, 94, 116, 48, 131, 241, 233, 70, 28, 65, 3, 195, 155, 79, 62, 47, 9, 25, 166, 81, 210, + 97, 177, 236, 239, 130, 70, 208, 223, 91, 154, 75, 208, 205, 4, 221, 62, 249, 113, 60, 95, 238, 40, 142, 230, 2, 28, + 237, 1, 28, 73, 245, 255, 132, 253, 142, 217, 151, 168, 245, 179, 43, 243, 115, 163, 113, 190, 18, 57, 63, 4, 83, 44, + 180, 55, 50, 180, 28, 188, 153, 224, 196, 122, 175, 111, 112, 68, 24, 65, 50, 204, 162, 100, 249, 119, 137, 226, 193, + 16, 251, 169, 50, 204, 235, 170, 41, 139, 214, 130, 42, 82, 253, 168, 253, 23, 222, 25, 236, 58, 176, 237, 20, 234, + 207, 107, 45, 78, 184, 55, 27, 124, 191, 104, 42, 111, 40, 121, 15, 94, 163, 77, 128, 65, 5, 0, 0, ]); export const initialWitnessMap: WitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/foreign_call.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/foreign_call.ts index 0e3d77f62a9..3c66ba18629 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/foreign_call.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/foreign_call.ts @@ -2,10 +2,10 @@ import { WitnessMap } from '@noir-lang/acvm_js'; // See `simple_brillig_foreign_call` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 144, 61, 10, 192, 32, 12, 133, 19, 11, 165, 116, 235, 77, 236, 13, 122, 153, - 14, 93, 58, 136, 120, 124, 241, 47, 129, 12, 42, 130, 126, 16, 18, 146, 16, 222, 11, 66, 225, 136, 129, 84, 111, 162, - 150, 112, 239, 161, 172, 231, 184, 113, 221, 45, 45, 245, 42, 242, 144, 216, 43, 250, 153, 83, 204, 191, 223, 189, - 198, 246, 92, 39, 60, 244, 63, 195, 59, 87, 99, 150, 165, 113, 83, 193, 0, 1, 19, 247, 29, 5, 160, 1, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 173, 144, 193, 10, 192, 32, 8, 134, 117, 99, 99, 236, 182, 55, 105, 111, 176, 151, + 217, 161, 75, 135, 136, 30, 63, 42, 82, 144, 8, 47, 245, 65, 252, 230, 47, 162, 34, 52, 174, 242, 144, 226, 131, 148, + 255, 18, 206, 125, 164, 102, 142, 23, 215, 245, 50, 114, 222, 173, 15, 80, 38, 65, 217, 108, 39, 61, 7, 30, 115, 11, + 223, 186, 248, 251, 160, 221, 170, 146, 64, 191, 39, 215, 60, 3, 47, 3, 99, 171, 188, 84, 164, 1, 0, 0, ]); export const initialWitnessMap: WitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000005'], diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts index a69ae443259..20ea88c7130 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/memory_op.ts @@ -1,9 +1,9 @@ // See `memory_op_circuit` integration test in `acir/tests/test_program_serialization.rs`. export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 81, 57, 14, 0, 32, 8, 147, 195, 255, 224, 15, 252, 255, 171, 212, 200, 208, - 129, 77, 24, 108, 66, 90, 150, 166, 20, 106, 23, 125, 143, 128, 62, 96, 103, 114, 173, 45, 198, 116, 182, 55, 140, - 106, 95, 74, 246, 149, 60, 47, 171, 46, 215, 126, 43, 87, 179, 111, 23, 8, 202, 176, 99, 248, 240, 9, 11, 137, 33, - 212, 110, 35, 3, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 203, 180, 255, 216, 15, 250, 255, 171, 10, 154, 16, 210, + 45, 61, 52, 144, 13, 132, 49, 135, 84, 54, 218, 26, 134, 22, 112, 5, 19, 180, 237, 61, 6, 88, 223, 208, 179, 125, 41, + 216, 151, 227, 188, 52, 187, 92, 253, 173, 92, 137, 190, 157, 143, 160, 254, 155, 45, 188, 148, 11, 38, 213, 237, 188, + 16, 35, 3, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/noir/noir-repo/acvm-repo/acvm_js/test/shared/nested_acir_call.ts b/noir/noir-repo/acvm-repo/acvm_js/test/shared/nested_acir_call.ts index 4b73d01bb01..64051dff93f 100644 --- a/noir/noir-repo/acvm-repo/acvm_js/test/shared/nested_acir_call.ts +++ b/noir/noir-repo/acvm-repo/acvm_js/test/shared/nested_acir_call.ts @@ -2,13 +2,13 @@ import { WitnessMap, StackItem, WitnessStack } from '@noir-lang/acvm_js'; // See `nested_acir_call_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 205, 146, 65, 10, 3, 33, 12, 69, 163, 46, 230, 58, 137, 209, 49, 238, 122, 149, 74, - 157, 251, 31, 161, 83, 154, 161, 86, 132, 89, 212, 194, 124, 248, 24, 36, 132, 228, 241, 29, 188, 229, 212, 47, 45, - 187, 205, 110, 11, 31, 25, 53, 28, 255, 103, 77, 14, 58, 29, 141, 55, 125, 241, 55, 145, 109, 102, 49, 174, 33, 212, - 228, 43, 49, 221, 209, 231, 34, 17, 67, 44, 171, 144, 80, 148, 248, 240, 194, 92, 37, 72, 202, 37, 39, 204, 20, 184, - 210, 22, 51, 111, 58, 204, 205, 219, 11, 161, 129, 208, 214, 6, 6, 114, 29, 193, 127, 193, 130, 137, 176, 236, 188, - 189, 252, 162, 183, 218, 230, 238, 97, 138, 250, 152, 245, 245, 87, 220, 12, 140, 113, 95, 153, 170, 129, 185, 17, 60, - 3, 54, 212, 19, 104, 145, 195, 151, 14, 4, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 205, 146, 97, 10, 195, 32, 12, 133, 163, 66, 207, 147, 24, 173, 241, 223, 174, 50, + 153, 189, 255, 17, 214, 177, 148, 57, 17, 250, 99, 14, 250, 224, 97, 144, 16, 146, 143, 231, 224, 45, 167, 126, 105, + 217, 109, 118, 91, 248, 200, 168, 225, 248, 63, 107, 114, 208, 233, 104, 188, 233, 139, 191, 137, 108, 51, 139, 113, + 13, 161, 38, 95, 137, 233, 142, 62, 23, 137, 24, 98, 89, 133, 132, 162, 196, 135, 23, 230, 42, 65, 82, 46, 57, 97, + 166, 192, 149, 182, 152, 121, 211, 97, 110, 222, 94, 8, 13, 132, 182, 54, 48, 144, 235, 8, 254, 11, 22, 76, 132, 101, + 231, 237, 229, 23, 189, 213, 54, 119, 15, 83, 212, 199, 172, 175, 191, 226, 102, 96, 140, 251, 202, 84, 13, 204, 141, + 224, 25, 176, 161, 158, 53, 121, 144, 73, 14, 4, 0, 0, ]); export const initialWitnessMap: WitnessMap = new Map([ From b7900b88a66bfd9d75b92ed05a4236dda41b2013 Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Thu, 25 Apr 2024 09:26:41 -0600 Subject: [PATCH 027/201] chore: lift run-e2e to yarn-project earthfile (#6018) can now `earthly +./yarn-project/run-e2e --test=e2e_token_contract` --- .github/workflows/ci.yml | 2 +- yarn-project/Earthfile | 7 ++++++- yarn-project/end-to-end/Earthfile | 5 ----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ce28bee2c1a..fa73e81326b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -50,7 +50,7 @@ jobs: # (Note ARM uses just 2 tests as a smoketest) - name: Create list of end-to-end jobs id: e2e_list - run: echo "list=$(earthly ls ./yarn-project/end-to-end | grep -v -E '(\+base)|(\+e2e-test-single)' | sed 's/+//' | jq -R . | jq -cs .)" >> $GITHUB_OUTPUT + run: echo "list=$(earthly ls ./yarn-project/end-to-end | grep -v '+base' | sed 's/+//' | jq -R . 
| jq -cs .)" >> $GITHUB_OUTPUT # all the end-to-end integration tests for aztec e2e: diff --git a/yarn-project/Earthfile b/yarn-project/Earthfile index c582587cd7d..0821b63f8a1 100644 --- a/yarn-project/Earthfile +++ b/yarn-project/Earthfile @@ -100,4 +100,9 @@ format-check: test: FROM +build - RUN yarn test \ No newline at end of file + RUN yarn test + +run-e2e: + ARG test + FROM +end-to-end + RUN DEBUG=aztec:* yarn test $test diff --git a/yarn-project/end-to-end/Earthfile b/yarn-project/end-to-end/Earthfile index fd46ad8905b..59403a2400c 100644 --- a/yarn-project/end-to-end/Earthfile +++ b/yarn-project/end-to-end/Earthfile @@ -86,11 +86,6 @@ e2e-tests: FROM ../+end-to-end RUN yarn test ./src/e2e -e2e-test-single: - ARG test - FROM ../+end-to-end - RUN yarn test $test - flakey-e2e-tests: FROM ../+end-to-end RUN yarn test --passWithNoTests ./src/flakey || true From 12bfc15923ee4b7b57e50ac714953cb8129e7d5d Mon Sep 17 00:00:00 2001 From: josh crites Date: Thu, 25 Apr 2024 13:21:33 -0400 Subject: [PATCH 028/201] fix(docs): Fix admonition in contract class protocol spec (#6017) Fixes minor formatting issue that makes the entire page part of the admonition. --- docs/docs/protocol-specs/contract-deployment/classes.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/docs/protocol-specs/contract-deployment/classes.md b/docs/docs/protocol-specs/contract-deployment/classes.md index 6f80a44aa96..d0ffd34a24f 100644 --- a/docs/docs/protocol-specs/contract-deployment/classes.md +++ b/docs/docs/protocol-specs/contract-deployment/classes.md @@ -12,6 +12,7 @@ Read the following discussions for additional context: - [Abstracting contract deployment](https://forum.aztec.network/t/proposal-abstracting-contract-deployment/2576) - [Implementing contract upgrades](https://forum.aztec.network/t/implementing-contract-upgrades/2570) - [Contract classes, upgrades, and default accounts](https://forum.aztec.network/t/contract-classes-upgrades-and-default-accounts/433) + ::: ## `ContractClass` From 0881cd3083af70271bceda695d0c8ad21212c172 Mon Sep 17 00:00:00 2001 From: esau <152162806+sklppy88@users.noreply.github.com> Date: Thu, 25 Apr 2024 19:41:04 +0200 Subject: [PATCH 029/201] feat: add key registry to deployment (e2e & sandbox) (#5875) Resolves #5611 --- .../src/core/libraries/ConstantsGen.sol | 4 +- .../key_registry_contract/src/main.nr | 8 --- .../crates/types/src/constants.nr | 4 +- .../types/src/tests/fixtures/contracts.nr | 14 ++--- yarn-project/aztec/src/sandbox.ts | 42 +++++++++++++- yarn-project/circuits.js/src/constants.gen.ts | 3 +- .../circuits.js/src/contract/artifact_hash.ts | 12 +++- .../src/contract/contract_class_id.test.ts | 2 +- .../src/contract/contract_class_id.ts | 9 +-- .../contract_class_registered_event.test.ts | 3 +- .../benchmarks/bench_process_history.test.ts | 2 +- .../src/e2e_counter_contract.test.ts | 2 +- .../end-to-end/src/e2e_encryption.test.ts | 2 +- .../end-to-end/src/e2e_key_registry.test.ts | 5 +- .../src/e2e_max_block_number.test.ts | 2 +- .../src/e2e_private_voting_contract.test.ts | 2 +- .../e2e_public_cross_chain_messaging.test.ts | 2 +- yarn-project/end-to-end/src/fixtures/utils.ts | 57 +++++++++++++++++-- .../__snapshots__/noir_test_gen.test.ts.snap | 16 +++--- .../scripts/copy-contracts.sh | 1 + .../src/key-registry/artifact.ts | 6 ++ .../src/key-registry/index.test.ts | 17 ++++++ .../src/key-registry/index.ts | 22 +++++++ .../pxe/src/pxe_service/create_pxe_service.ts | 2 + 24 files changed, 194 insertions(+), 45 deletions(-) create mode 100644 
yarn-project/protocol-contracts/src/key-registry/artifact.ts create mode 100644 yarn-project/protocol-contracts/src/key-registry/index.test.ts create mode 100644 yarn-project/protocol-contracts/src/key-registry/index.ts diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index f93ab6ce221..e491271a5ca 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -89,11 +89,13 @@ library Constants { uint256 internal constant DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE = 0x85864497636cf755ae7bde03f267ce01a520981c21c3682aaf82a631; uint256 internal constant DEPLOYER_CONTRACT_ADDRESS = - 0x1236d27f14d2934fd666beff34a0b4b746949f5d51a149eb67f908eb95092f54; + 0x0097949bb96834550868230a1b6cc242d1f662f7c52946245e4e73da1b8b2165; uint256 internal constant DEFAULT_GAS_LIMIT = 1_000_000_000; uint256 internal constant DEFAULT_TEARDOWN_GAS_LIMIT = 100_000_000; uint256 internal constant DEFAULT_MAX_FEE_PER_GAS = 10; uint256 internal constant DEFAULT_INCLUSION_FEE = 0; + uint256 internal constant CANONICAL_KEY_REGISTRY_ADDRESS = + 0x1585e564a60e6ec974bc151b62705292ebfc75c33341986a47fd9749cedb567e; uint256 internal constant AZTEC_ADDRESS_LENGTH = 1; uint256 internal constant GAS_FEES_LENGTH = 3; uint256 internal constant GAS_LENGTH = 3; diff --git a/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr b/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr index d4ed6addd03..1d07a431e24 100644 --- a/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/key_registry_contract/src/main.nr @@ -2,17 +2,13 @@ contract KeyRegistry { use dep::authwit::auth::assert_current_call_valid_authwit_public; use dep::aztec::{ - context::gas::GasOpts, state_vars::{ SharedMutable, Map }, protocol_types::{ - abis::function_selector::FunctionSelector, - contract_class_id::ContractClassId, address::{ AztecAddress, - EthAddress, PublicKeysHash, PartialAddress, }, @@ -21,10 +17,6 @@ contract KeyRegistry { GENERATOR_INDEX__PUBLIC_KEYS_HASH }, hash::poseidon2_hash, - traits::{ - Serialize, - Deserialize, - } }, }; diff --git a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr index 10649ef22cf..9f454eaeefe 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/constants.nr @@ -126,7 +126,7 @@ global REGISTERER_UNCONSTRAINED_FUNCTION_BROADCASTED_MAGIC_VALUE = 0xe7af8166354 // CONTRACT INSTANCE CONSTANTS // sha224sum 'struct ContractInstanceDeployed' global DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE = 0x85864497636cf755ae7bde03f267ce01a520981c21c3682aaf82a631; -global DEPLOYER_CONTRACT_ADDRESS = 0x1236d27f14d2934fd666beff34a0b4b746949f5d51a149eb67f908eb95092f54; +global DEPLOYER_CONTRACT_ADDRESS = 0x0097949bb96834550868230a1b6cc242d1f662f7c52946245e4e73da1b8b2165; // GAS DEFAULTS global DEFAULT_GAS_LIMIT: u32 = 1_000_000_000; @@ -134,6 +134,8 @@ global DEFAULT_TEARDOWN_GAS_LIMIT: u32 = 100_000_000; global DEFAULT_MAX_FEE_PER_GAS: Field = 10; global DEFAULT_INCLUSION_FEE: Field = 0; +global CANONICAL_KEY_REGISTRY_ADDRESS = 0x1585e564a60e6ec974bc151b62705292ebfc75c33341986a47fd9749cedb567e; + // LENGTH OF STRUCTS SERIALIZED TO FIELDS global AZTEC_ADDRESS_LENGTH = 1; global GAS_FEES_LENGTH: u64 = 3; diff --git 
a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contracts.nr b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contracts.nr index bc9aec40b02..1ca20064484 100644 --- a/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contracts.nr +++ b/noir-projects/noir-protocol-circuits/crates/types/src/tests/fixtures/contracts.nr @@ -19,13 +19,13 @@ struct ContractData { global default_contract = ContractData { contract_address_salt: 0x000000000000000000000000000000000000000000000000000000000000ddd5, artifact_hash: 0x0000000000000000000000000000000000000000000000000000000000003039, - public_bytecode_commitment: 0x129a3438653fe147133b2c274757920e37896305e7664c8c1eb380be3efd5fed, + public_bytecode_commitment: 0x0000000000000000000000000000000000000000000000000000000000000005, private_functions_root: 0x19a3cc0b714976fb35d58b684ba36e86f82bac8b87517904a2727e5113fb4cba, - address: AztecAddress { inner: 0x2d941148ee5adeece35991d32acbcf4200742991c61990dee965bedf729d21a9 }, - partial_address: PartialAddress { inner: 0x23a6933a485200a8d34b9929d61868c9635793f878d67ce86a1b1355c0ab0d47 }, - contract_class_id: ContractClassId { inner: 0x0ce2a998337b1e6da1ac1d802a8bb9e10b7d705d210e61efb9642855009814a6 }, + address: AztecAddress { inner: 0x24e2561e4216c843ff11bf77d4f8a68247e537980273ce54b09b505f7352f6bb }, + partial_address: PartialAddress { inner: 0x27ab8475fe4647b48ffc4df7a6cc42bf1125f000ff113d9a0d6a11b952626761 }, + contract_class_id: ContractClassId { inner: 0x29f4bda24f38507064a90f7505dc0381c9d83c97271c5c2e92a4261d300861bf }, public_keys_hash: PublicKeysHash { inner: 0x000000000000000000000000000000000000000000000000000000000000b26e }, - salted_initialization_hash: SaltedInitializationHash { inner: 0x0b095458845137ebf1e6061c8c0ba1d907241a3b56dc1d3e73d2fea78f04a036 }, + salted_initialization_hash: SaltedInitializationHash { inner: 0x25765504545d2cdaaa6544eb24bc78a3e20384452f2525669f196a1a42f45906 }, deployer: AztecAddress { inner: 0x0000000000000000000000000000000000000000000000000000000000000000 } }; @@ -33,11 +33,11 @@ global default_contract = ContractData { global parent_contract = ContractData { contract_address_salt: 0x0000000000000000000000000000000000000000000000000000000000001618, artifact_hash: 0x00000000000000000000000000000000000000000000000000000000000004bc, - public_bytecode_commitment: 0x1435ed970b275bebf95de3df53f23f3d2e97c9b54cf442bb03a3fa17a0ee3cd7, + public_bytecode_commitment: 0x0000000000000000000000000000000000000000000000000000000000000005, private_functions_root: 0x2c1c949cb226995de94b7b8b5aeaab440739f2dfeb06d358441f60932cf243a7, address: AztecAddress { inner: 0x24692d7dbb532557c7466e8782d1fe99077e4787570414bd1a5e8fa5300caad8 }, partial_address: PartialAddress { inner: 0x127bbd73a3cf497fb2d85342571695d894985b449a9343eec55485e9cbc514f8 }, - contract_class_id: ContractClassId { inner: 0x1f1f963a350e2c883cc6730c19fc5d5b47a40694d805cbb0720fa76fe295df90 }, + contract_class_id: ContractClassId { inner: 0x037a09515a79a2b8ebe5139dae1ab7c433523ac1fd5631836890df2148df51c7 }, public_keys_hash: PublicKeysHash { inner: 0x00000000000000000000000000000000000000000000000000000000000011c1 }, salted_initialization_hash: SaltedInitializationHash { inner: 0x04643e65513869350552499ed3412df59540dffe3cd698203deee8900b53bcec }, deployer: AztecAddress { inner: 0x0000000000000000000000000000000000000000000000000000000000000000 } diff --git a/yarn-project/aztec/src/sandbox.ts b/yarn-project/aztec/src/sandbox.ts index 18d2f792c23..9b7f52ffec1 100644 --- 
a/yarn-project/aztec/src/sandbox.ts b/yarn-project/aztec/src/sandbox.ts
--- a/yarn-project/aztec/src/sandbox.ts
+++ b/yarn-project/aztec/src/sandbox.ts
@@ -1,8 +1,9 @@
 #!/usr/bin/env -S node --no-warnings
 import { type AztecNodeConfig, AztecNodeService, getConfigEnvVars } from '@aztec/aztec-node';
-import { type AztecAddress, SignerlessWallet, type Wallet } from '@aztec/aztec.js';
+import { AztecAddress, SignerlessWallet, type Wallet } from '@aztec/aztec.js';
 import { DefaultMultiCallEntrypoint } from '@aztec/aztec.js/entrypoint';
 import { type AztecNode } from '@aztec/circuit-types';
+import { CANONICAL_KEY_REGISTRY_ADDRESS } from '@aztec/circuits.js';
 import {
   type DeployL1Contracts,
   type L1ContractAddresses,
@@ -30,7 +31,9 @@ import {
   RollupBytecode,
 } from '@aztec/l1-artifacts';
 import { GasTokenContract } from '@aztec/noir-contracts.js/GasToken';
+import { KeyRegistryContract } from '@aztec/noir-contracts.js/KeyRegistry';
 import { getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token';
+import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry';
 import { type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe';
 
 import {
@@ -185,6 +188,39 @@ async function deployCanonicalL2GasToken(deployer: Wallet, l1ContractAddresses:
   logger.info(`Deployed Gas Token on L2 at ${canonicalGasToken.address}`);
 }
 
+/**
+ * Deploys the key registry on L2.
+ */
+async function deployCanonicalKeyRegistry(deployer: Wallet) {
+  const canonicalKeyRegistry = getCanonicalKeyRegistry();
+
+  // We check to see if there exists a contract at the canonical Key Registry address with the same contract class id as we expect. This means that
+  // the key registry has already been deployed to the correct address.
+  if (
+    (await deployer.getContractInstance(canonicalKeyRegistry.address))?.contractClassId.equals(
+      canonicalKeyRegistry.contractClass.id,
+    ) &&
+    (await deployer.isContractClassPubliclyRegistered(canonicalKeyRegistry.contractClass.id))
+  ) {
+    return;
+  }
+
+  const keyRegistry = await KeyRegistryContract.deploy(deployer)
+    .send({ contractAddressSalt: canonicalKeyRegistry.instance.salt, universalDeploy: true })
+    .deployed();
+
+  if (
+    !keyRegistry.address.equals(canonicalKeyRegistry.address) ||
+    !keyRegistry.address.equals(AztecAddress.fromBigInt(CANONICAL_KEY_REGISTRY_ADDRESS))
+  ) {
+    throw new Error(
+      `Deployed Key Registry address ${keyRegistry.address} does not match expected address ${canonicalKeyRegistry.address}, or they both do not equal CANONICAL_KEY_REGISTRY_ADDRESS`,
+    );
+  }
+
+  logger.info(`Deployed Key Registry on L2 at ${canonicalKeyRegistry.address}`);
+}
+
 /** Sandbox settings.
 */
 export type SandboxConfig = AztecNodeConfig & {
   /** Mnemonic used to derive the L1 deployer private key.*/
@@ -213,6 +249,10 @@ export async function createSandbox(config: Partial<SandboxConfig> = {}) {
   const node = await createAztecNode(aztecNodeConfig);
   const pxe = await createAztecPXE(node);
 
+  await deployCanonicalKeyRegistry(
+    new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(aztecNodeConfig.chainId, aztecNodeConfig.version)),
+  );
+
   if (config.enableGas) {
     await deployCanonicalL2GasToken(
       new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(aztecNodeConfig.chainId, aztecNodeConfig.version)),
diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts
index f141aa31070..bdac9c533f0 100644
--- a/yarn-project/circuits.js/src/constants.gen.ts
+++ b/yarn-project/circuits.js/src/constants.gen.ts
@@ -74,11 +74,12 @@ export const REGISTERER_UNCONSTRAINED_FUNCTION_BROADCASTED_MAGIC_VALUE =
   0xe7af816635466f128568edb04c9fa024f6c87fb9010fdbffa68b3d99n;
 export const DEPLOYER_CONTRACT_INSTANCE_DEPLOYED_MAGIC_VALUE =
   0x85864497636cf755ae7bde03f267ce01a520981c21c3682aaf82a631n;
-export const DEPLOYER_CONTRACT_ADDRESS = 0x1236d27f14d2934fd666beff34a0b4b746949f5d51a149eb67f908eb95092f54n;
+export const DEPLOYER_CONTRACT_ADDRESS = 0x0097949bb96834550868230a1b6cc242d1f662f7c52946245e4e73da1b8b2165n;
 export const DEFAULT_GAS_LIMIT = 1_000_000_000;
 export const DEFAULT_TEARDOWN_GAS_LIMIT = 100_000_000;
 export const DEFAULT_MAX_FEE_PER_GAS = 10;
 export const DEFAULT_INCLUSION_FEE = 0;
+export const CANONICAL_KEY_REGISTRY_ADDRESS = 0x1585e564a60e6ec974bc151b62705292ebfc75c33341986a47fd9749cedb567en;
 export const AZTEC_ADDRESS_LENGTH = 1;
 export const GAS_FEES_LENGTH = 3;
 export const GAS_LENGTH = 3;
diff --git a/yarn-project/circuits.js/src/contract/artifact_hash.ts b/yarn-project/circuits.js/src/contract/artifact_hash.ts
index 58be0f2437f..f4e594854dc 100644
--- a/yarn-project/circuits.js/src/contract/artifact_hash.ts
+++ b/yarn-project/circuits.js/src/contract/artifact_hash.ts
@@ -59,8 +59,18 @@ export function computeArtifactHashPreimage(artifact: ContractArtifact) {
 }
 
 export function computeArtifactMetadataHash(artifact: ContractArtifact) {
-  // TODO(@spalladino): Should we use the sorted event selectors instead? They'd need to be unique for that.
+  // TODO: #6021: Should we use the sorted event selectors instead? They'd need to be unique for that.
+  // Response - The output selectors need to be sorted, because if not noir makes no guarantees on the order of outputs for some reason
+
   const metadata = { name: artifact.name, outputs: artifact.outputs };
+
+  // This is a temporary workaround for the Key Registry
+  // TODO: #6021 We need to make sure the artifact is deterministic from any specific compiler run. This relates to selectors not being sorted and being
+  // apparently random in the order they appear after compiled w/ nargo. We can try to sort this upon loading an artifact.
+  if (artifact.name === 'KeyRegistry') {
+    return sha256Fr(Buffer.from(JSON.stringify({ name: artifact.name }), 'utf-8'));
+  }
+
  return sha256Fr(Buffer.from(JSON.stringify(metadata), 'utf-8'));
}
diff --git a/yarn-project/circuits.js/src/contract/contract_class_id.test.ts b/yarn-project/circuits.js/src/contract/contract_class_id.test.ts
index 7f40b3029a0..a0b1e64a421 100644
--- a/yarn-project/circuits.js/src/contract/contract_class_id.test.ts
+++ b/yarn-project/circuits.js/src/contract/contract_class_id.test.ts
@@ -25,7 +25,7 @@ describe('ContractClass', () => {
    };

    expect(computeContractClassId(contractClass).toString()).toMatchInlineSnapshot(
-      `"0x2f4c56801b35e01081aeb1b2bd07eba0f8d55de625ec1e957347eedaea1669bb"`,
+      `"0x0fd34f4f2d6d6a7fc61d8fb8e0c9a411354856fa86c568e4c9e0935b367dc69d"`,
    );
  });
});
diff --git a/yarn-project/circuits.js/src/contract/contract_class_id.ts b/yarn-project/circuits.js/src/contract/contract_class_id.ts
index 86020074ed6..89f4081ae8c 100644
--- a/yarn-project/circuits.js/src/contract/contract_class_id.ts
+++ b/yarn-project/circuits.js/src/contract/contract_class_id.ts
@@ -1,4 +1,4 @@
-import { pedersenHash, sha256 } from '@aztec/foundation/crypto';
+import { pedersenHash } from '@aztec/foundation/crypto';
 import { Fr } from '@aztec/foundation/fields';
 import { type ContractClass } from '@aztec/types/contracts';

@@ -56,7 +56,8 @@ export type ContractClassIdPreimage = {
  publicBytecodeCommitment: Fr;
};

-// TODO(@spalladino): Replace with actual implementation
-export function computePublicBytecodeCommitment(bytecode: Buffer) {
-  return Fr.fromBufferReduce(sha256(bytecode));
+// TODO(#5860): Replace with actual implementation
+// Changed to work with canonical contracts, whose noir compiles may be non-deterministic while we want to keep their addresses constant
+export function computePublicBytecodeCommitment(_bytecode: Buffer) {
+  return new Fr(5);
 }
diff --git a/yarn-project/circuits.js/src/contract/events/contract_class_registered_event.test.ts b/yarn-project/circuits.js/src/contract/events/contract_class_registered_event.test.ts
index dc2819e4852..4fec2f49d4a 100644
--- a/yarn-project/circuits.js/src/contract/events/contract_class_registered_event.test.ts
+++ b/yarn-project/circuits.js/src/contract/events/contract_class_registered_event.test.ts
@@ -11,8 +11,9 @@ describe('ContractClassRegisteredEvent', () => {
    );
    expect(event.artifactHash.toString()).toEqual('0x072dce903b1a299d6820eeed695480fe9ec46658b1101885816aed6dd86037f0');
    expect(event.packedPublicBytecode.length).toEqual(27090);
+    // TODO: #5860
    expect(computePublicBytecodeCommitment(event.packedPublicBytecode).toString()).toEqual(
-      '0x1d5c54998c08cee8ad4a8af5740f2e844fe6db3a5bb4b6382a48b2daeabeee3f',
+      '0x0000000000000000000000000000000000000000000000000000000000000005',
    );
  });
});
diff --git a/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts b/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts
index 9654c018449..22d11429873 100644
--- a/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts
+++ b/yarn-project/end-to-end/src/benchmarks/bench_process_history.test.ts
@@ -21,7 +21,7 @@ import {
const BLOCK_SIZE = BENCHMARK_HISTORY_BLOCK_SIZE;
const CHAIN_LENGTHS = BENCHMARK_HISTORY_CHAIN_LENGTHS;
const MAX_CHAIN_LENGTH = CHAIN_LENGTHS[CHAIN_LENGTHS.length - 1];
-const SETUP_BLOCK_COUNT = 2; // deploy account + deploy contract
+const SETUP_BLOCK_COUNT = 3; // deploy protocol contracts + deploy account + deploy contract

describe('benchmarks/process_history', () =>
{ let context: EndToEndContext; diff --git a/yarn-project/end-to-end/src/e2e_counter_contract.test.ts b/yarn-project/end-to-end/src/e2e_counter_contract.test.ts index f2d75506afb..ec7394bb434 100644 --- a/yarn-project/end-to-end/src/e2e_counter_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_counter_contract.test.ts @@ -19,7 +19,7 @@ describe('e2e_counter_contract', () => { counterContract = await CounterContract.deploy(wallet, 0, owner).send().deployed(); logger.info(`Counter contract deployed at ${counterContract.address}`); - }, 25_000); + }, 45_000); afterAll(() => teardown()); diff --git a/yarn-project/end-to-end/src/e2e_encryption.test.ts b/yarn-project/end-to-end/src/e2e_encryption.test.ts index 9206c566aea..7734c66ee46 100644 --- a/yarn-project/end-to-end/src/e2e_encryption.test.ts +++ b/yarn-project/end-to-end/src/e2e_encryption.test.ts @@ -17,7 +17,7 @@ describe('e2e_encryption', () => { beforeAll(async () => { ({ teardown, wallet } = await setup()); contract = await TestContract.deploy(wallet).send().deployed(); - }, 25_000); + }, 45_000); afterAll(() => teardown()); diff --git a/yarn-project/end-to-end/src/e2e_key_registry.test.ts b/yarn-project/end-to-end/src/e2e_key_registry.test.ts index 37f6ac8cea5..b3dccd1b34b 100644 --- a/yarn-project/end-to-end/src/e2e_key_registry.test.ts +++ b/yarn-project/end-to-end/src/e2e_key_registry.test.ts @@ -2,6 +2,7 @@ import { type AccountWallet, AztecAddress, Fr, type PXE } from '@aztec/aztec.js' import { GeneratorIndex } from '@aztec/circuits.js'; import { poseidon2Hash } from '@aztec/foundation/crypto'; import { KeyRegistryContract, TestContract } from '@aztec/noir-contracts.js'; +import { getCanonicalKeyRegistryAddress } from '@aztec/protocol-contracts/key-registry'; import { jest } from '@jest/globals'; @@ -11,6 +12,7 @@ const TIMEOUT = 100_000; describe('SharedMutablePrivateGetter', () => { let keyRegistry: KeyRegistryContract; + let testContract: TestContract; let pxe: PXE; jest.setTimeout(TIMEOUT); @@ -21,8 +23,9 @@ describe('SharedMutablePrivateGetter', () => { beforeAll(async () => { ({ teardown, pxe, wallets } = await setup(2)); + keyRegistry = await KeyRegistryContract.at(getCanonicalKeyRegistryAddress(), wallets[0]); + testContract = await TestContract.deploy(wallets[0]).send().deployed(); - keyRegistry = await KeyRegistryContract.deploy(wallets[0]).send().deployed(); await publicDeployAccounts(wallets[0], wallets.slice(0, 2)); }, 120_000); diff --git a/yarn-project/end-to-end/src/e2e_max_block_number.test.ts b/yarn-project/end-to-end/src/e2e_max_block_number.test.ts index 7000bcd5df7..56cc4e4f10e 100644 --- a/yarn-project/end-to-end/src/e2e_max_block_number.test.ts +++ b/yarn-project/end-to-end/src/e2e_max_block_number.test.ts @@ -13,7 +13,7 @@ describe('e2e_max_block_number', () => { beforeAll(async () => { ({ teardown, wallet, pxe } = await setup()); contract = await TestContract.deploy(wallet).send().deployed(); - }, 25_000); + }, 45_000); afterAll(() => teardown()); diff --git a/yarn-project/end-to-end/src/e2e_private_voting_contract.test.ts b/yarn-project/end-to-end/src/e2e_private_voting_contract.test.ts index 71c1920c5ce..15f537643a1 100644 --- a/yarn-project/end-to-end/src/e2e_private_voting_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_private_voting_contract.test.ts @@ -20,7 +20,7 @@ describe('e2e_voting_contract', () => { votingContract = await EasyPrivateVotingContract.deploy(wallet, owner).send().deployed(); logger.info(`Counter contract deployed at ${votingContract.address}`); - }, 25_000); + }, 
45_000);

  afterAll(() => teardown());

diff --git a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging.test.ts b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging.test.ts
index 6108e156ed3..3edfd40cc7b 100644
--- a/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging.test.ts
+++ b/yarn-project/end-to-end/src/e2e_public_cross_chain_messaging.test.ts
@@ -51,7 +51,7 @@ describe('e2e_public_cross_chain_messaging', () => {
    user1Wallet = wallets[0];
    user2Wallet = wallets[1];
    await publicDeployAccounts(wallets[0], wallets.slice(0, 2));
-  }, 30_000);
+  }, 45_000);

  beforeEach(async () => {
    crossChainTestHarness = await CrossChainTestHarness.new(
diff --git a/yarn-project/end-to-end/src/fixtures/utils.ts b/yarn-project/end-to-end/src/fixtures/utils.ts
index bcfcf5a2aae..df58258d1f6 100644
--- a/yarn-project/end-to-end/src/fixtures/utils.ts
+++ b/yarn-project/end-to-end/src/fixtures/utils.ts
@@ -3,7 +3,7 @@ import { createAccounts, getDeployedTestAccountsWallets } from '@aztec/accounts/
 import { type AztecNodeConfig, AztecNodeService, getConfigEnvVars } from '@aztec/aztec-node';
 import {
   type AccountWalletWithSecretKey,
-  type AztecAddress,
+  AztecAddress,
   type AztecNode,
   BatchCall,
   CheatCodes,
@@ -28,6 +28,11 @@ import {
 } from '@aztec/aztec.js';
 import { deployInstance, registerContractClass } from '@aztec/aztec.js/deployment';
 import { DefaultMultiCallEntrypoint } from '@aztec/aztec.js/entrypoint';
+import {
+  CANONICAL_KEY_REGISTRY_ADDRESS,
+  computeContractAddressFromInstance,
+  getContractClassFromArtifact,
+} from '@aztec/circuits.js';
 import { randomBytes } from '@aztec/foundation/crypto';
 import { makeBackoff, retry } from '@aztec/foundation/retry';
 import {
@@ -46,8 +51,10 @@ import {
   RollupAbi,
   RollupBytecode,
 } from '@aztec/l1-artifacts';
+import { KeyRegistryContract } from '@aztec/noir-contracts.js';
 import { GasTokenContract } from '@aztec/noir-contracts.js/GasToken';
 import { getCanonicalGasToken, getCanonicalGasTokenAddress } from '@aztec/protocol-contracts/gas-token';
+import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry';
 import { PXEService, type PXEServiceConfig, createPXEService, getPXEServiceConfig } from '@aztec/pxe';
 import { type SequencerClient } from '@aztec/sequencer-client';

@@ -270,10 +277,14 @@ async function setupWithRemoteEnvironment(
  const cheatCodes = CheatCodes.create(config.rpcUrl, pxeClient!);
  const teardown = () => Promise.resolve();

+  const { chainId, protocolVersion } = await pxeClient.getNodeInfo();
+  // this contract might already have been deployed
+  // the following deployment functions are idempotent
+  await deployCanonicalKeyRegistry(
+    new SignerlessWallet(pxeClient, new DefaultMultiCallEntrypoint(chainId, protocolVersion)),
+  );
+
  if (enableGas) {
-    const { chainId, protocolVersion } = await pxeClient.getNodeInfo();
-    // this contract might already have been deployed
-    // the following function is idempotent
    await deployCanonicalGasToken(
      new SignerlessWallet(pxeClient, new DefaultMultiCallEntrypoint(chainId, protocolVersion)),
    );
@@ -407,6 +418,11 @@ export async function setup(
  logger.verbose('Creating a pxe...');
  const { pxe, wallets } = await setupPXEService(numberOfAccounts, aztecNode!, pxeOpts, logger);

+  logger.verbose('Deploying key registry...');
+  await deployCanonicalKeyRegistry(
+    new SignerlessWallet(pxe, new DefaultMultiCallEntrypoint(config.chainId, config.version)),
+  );
+
  if (enableGas) {
    logger.verbose('Deploying gas token...');
    await deployCanonicalGasToken(
@@ -595,3 +611,36 @@
export async function deployCanonicalGasToken(deployer: Wallet) { await expect(deployer.getContractInstance(gasToken.address)).resolves.toBeDefined(); await expect(deployer.isContractPubliclyDeployed(gasToken.address)).resolves.toBe(true); } + +async function deployCanonicalKeyRegistry(deployer: Wallet) { + const canonicalKeyRegistry = getCanonicalKeyRegistry(); + + // We check to see if there exists a contract at the canonical Key Registry address with the same contract class id as we expect. This means that + // the key registry has already been deployed to the correct address. + if ( + (await deployer.getContractInstance(canonicalKeyRegistry.address))?.contractClassId.equals( + canonicalKeyRegistry.contractClass.id, + ) && + (await deployer.isContractClassPubliclyRegistered(canonicalKeyRegistry.contractClass.id)) + ) { + return; + } + + const keyRegistry = await KeyRegistryContract.deploy(deployer) + .send({ contractAddressSalt: canonicalKeyRegistry.instance.salt, universalDeploy: true }) + .deployed(); + + if ( + !keyRegistry.address.equals(canonicalKeyRegistry.address) || + !keyRegistry.address.equals(AztecAddress.fromBigInt(CANONICAL_KEY_REGISTRY_ADDRESS)) + ) { + throw new Error( + `Deployed Key Registry address ${keyRegistry.address} does not match expected address ${canonicalKeyRegistry.address}, or they both do not equal CANONICAL_KEY_REGISTRY_ADDRESS`, + ); + } + + expect(computeContractAddressFromInstance(keyRegistry.instance)).toEqual(keyRegistry.address); + expect(getContractClassFromArtifact(keyRegistry.artifact).id).toEqual(keyRegistry.instance.contractClassId); + await expect(deployer.isContractClassPubliclyRegistered(canonicalKeyRegistry.contractClass.id)).resolves.toBe(true); + await expect(deployer.getContractInstance(canonicalKeyRegistry.instance.address)).resolves.toBeDefined(); +} diff --git a/yarn-project/noir-protocol-circuits-types/src/__snapshots__/noir_test_gen.test.ts.snap b/yarn-project/noir-protocol-circuits-types/src/__snapshots__/noir_test_gen.test.ts.snap index c1eac3dcf71..4210151e650 100644 --- a/yarn-project/noir-protocol-circuits-types/src/__snapshots__/noir_test_gen.test.ts.snap +++ b/yarn-project/noir-protocol-circuits-types/src/__snapshots__/noir_test_gen.test.ts.snap @@ -4364,11 +4364,11 @@ exports[`Data generation for noir tests Computes contract info for defaultContra "{ contract_address_salt: 0x000000000000000000000000000000000000000000000000000000000000ddd5, artifact_hash: 0x0000000000000000000000000000000000000000000000000000000000003039, - public_bytecode_commitment: 0x129a3438653fe147133b2c274757920e37896305e7664c8c1eb380be3efd5fed, + public_bytecode_commitment: 0x0000000000000000000000000000000000000000000000000000000000000005, private_functions_root: 0x19a3cc0b714976fb35d58b684ba36e86f82bac8b87517904a2727e5113fb4cba, - address: AztecAddress { inner: 0x0efbcdd92da9729c276193e01399cd12a30c2ea0774feec95de486cb3986c0a0 }, - partial_address: PartialAddress { inner: 0x30248868d815221789c5f173462ee99637ac9748000d31c5f311dbc3d996eb71 }, - contract_class_id: ContractClassId { inner: 0x0ce2a998337b1e6da1ac1d802a8bb9e10b7d705d210e61efb9642855009814a6 }, + address: AztecAddress { inner: 0x24e2561e4216c843ff11bf77d4f8a68247e537980273ce54b09b505f7352f6bb }, + partial_address: PartialAddress { inner: 0x27ab8475fe4647b48ffc4df7a6cc42bf1125f000ff113d9a0d6a11b952626761 }, + contract_class_id: ContractClassId { inner: 0x29f4bda24f38507064a90f7505dc0381c9d83c97271c5c2e92a4261d300861bf }, public_keys_hash: PublicKeysHash { inner: 
0x000000000000000000000000000000000000000000000000000000000000b26e }, salted_initialization_hash: SaltedInitializationHash { inner: 0x25765504545d2cdaaa6544eb24bc78a3e20384452f2525669f196a1a42f45906 }, deployer: AztecAddress { inner: 0x0000000000000000000000000000000000000000000000000000000000000000 } @@ -4379,11 +4379,11 @@ exports[`Data generation for noir tests Computes contract info for parentContrac "{ contract_address_salt: 0x0000000000000000000000000000000000000000000000000000000000001618, artifact_hash: 0x00000000000000000000000000000000000000000000000000000000000004bc, - public_bytecode_commitment: 0x1435ed970b275bebf95de3df53f23f3d2e97c9b54cf442bb03a3fa17a0ee3cd7, + public_bytecode_commitment: 0x0000000000000000000000000000000000000000000000000000000000000005, private_functions_root: 0x2c1c949cb226995de94b7b8b5aeaab440739f2dfeb06d358441f60932cf243a7, - address: AztecAddress { inner: 0x2dbb8c3db287eef6758cc13b1702ff6c5a4b534a8d0f08d76106e28aa0f4bd3c }, - partial_address: PartialAddress { inner: 0x20ac96f5da24137797077661d4222c8caf97d2d3fdeadbf4cad8f529a96eb610 }, - contract_class_id: ContractClassId { inner: 0x1f1f963a350e2c883cc6730c19fc5d5b47a40694d805cbb0720fa76fe295df90 }, + address: AztecAddress { inner: 0x19343447ef402928668ca94b123ccddc264fbf7528da6d77277af40acd9c43d3 }, + partial_address: PartialAddress { inner: 0x1d6cbda965559d021963e76b1342a879b69c8f29c6b458068ed81a87cbf20181 }, + contract_class_id: ContractClassId { inner: 0x037a09515a79a2b8ebe5139dae1ab7c433523ac1fd5631836890df2148df51c7 }, public_keys_hash: PublicKeysHash { inner: 0x00000000000000000000000000000000000000000000000000000000000011c1 }, salted_initialization_hash: SaltedInitializationHash { inner: 0x0b1d457cdacb66e76eccb29a4e34dff5ae10b9d3d2f0d85b59aa8cf68bd1cf86 }, deployer: AztecAddress { inner: 0x0000000000000000000000000000000000000000000000000000000000000000 } diff --git a/yarn-project/protocol-contracts/scripts/copy-contracts.sh b/yarn-project/protocol-contracts/scripts/copy-contracts.sh index 14a227c80a0..a58fbdfcf60 100755 --- a/yarn-project/protocol-contracts/scripts/copy-contracts.sh +++ b/yarn-project/protocol-contracts/scripts/copy-contracts.sh @@ -6,6 +6,7 @@ contracts=( contract_class_registerer_contract-ContractClassRegisterer contract_instance_deployer_contract-ContractInstanceDeployer gas_token_contract-GasToken + key_registry_contract-KeyRegistry multi_call_entrypoint_contract-MultiCallEntrypoint ) diff --git a/yarn-project/protocol-contracts/src/key-registry/artifact.ts b/yarn-project/protocol-contracts/src/key-registry/artifact.ts new file mode 100644 index 00000000000..89436d313e6 --- /dev/null +++ b/yarn-project/protocol-contracts/src/key-registry/artifact.ts @@ -0,0 +1,6 @@ +import { loadContractArtifact } from '@aztec/types/abi'; +import { type NoirCompiledContract } from '@aztec/types/noir'; + +import KeyRegistryJson from '../artifacts/KeyRegistry.json' assert { type: 'json' }; + +export const KeyRegistryArtifact = loadContractArtifact(KeyRegistryJson as NoirCompiledContract); diff --git a/yarn-project/protocol-contracts/src/key-registry/index.test.ts b/yarn-project/protocol-contracts/src/key-registry/index.test.ts new file mode 100644 index 00000000000..216213556d8 --- /dev/null +++ b/yarn-project/protocol-contracts/src/key-registry/index.test.ts @@ -0,0 +1,17 @@ +import { + AztecAddress, + CANONICAL_KEY_REGISTRY_ADDRESS, + computeContractAddressFromInstance, + getContractClassFromArtifact, +} from '@aztec/circuits.js'; + +import { getCanonicalKeyRegistry } from './index.js'; + 
+describe('KeyRegistry', () => { + it('returns canonical protocol contract', () => { + const contract = getCanonicalKeyRegistry(); + expect(computeContractAddressFromInstance(contract.instance)).toEqual(contract.address); + expect(getContractClassFromArtifact(contract.artifact).id).toEqual(contract.contractClass.id); + expect(contract.address).toEqual(AztecAddress.fromBigInt(CANONICAL_KEY_REGISTRY_ADDRESS)); + }); +}); diff --git a/yarn-project/protocol-contracts/src/key-registry/index.ts b/yarn-project/protocol-contracts/src/key-registry/index.ts new file mode 100644 index 00000000000..dd4bbf31d6b --- /dev/null +++ b/yarn-project/protocol-contracts/src/key-registry/index.ts @@ -0,0 +1,22 @@ +import { AztecAddress, CANONICAL_KEY_REGISTRY_ADDRESS } from '@aztec/circuits.js'; + +import { type ProtocolContract, getCanonicalProtocolContract } from '../protocol_contract.js'; +import { KeyRegistryArtifact } from './artifact.js'; + +/** Returns the canonical deployment of the public key registry. */ +export function getCanonicalKeyRegistry(): ProtocolContract { + const contract = getCanonicalProtocolContract(KeyRegistryArtifact, 1); + + if (!contract.address.equals(KeyRegistryAddress)) { + throw new Error( + `Incorrect address for key registry (got ${contract.address.toString()} but expected ${KeyRegistryAddress.toString()}). Check CANONICAL_KEY_REGISTRY_ADDRESS is set to the correct value in the constants files and run the protocol-contracts package tests.`, + ); + } + return contract; +} + +export function getCanonicalKeyRegistryAddress(): AztecAddress { + return getCanonicalKeyRegistry().address; +} + +export const KeyRegistryAddress = AztecAddress.fromBigInt(CANONICAL_KEY_REGISTRY_ADDRESS); diff --git a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts index 2fc3b4c9b93..5566ccdc7f5 100644 --- a/yarn-project/pxe/src/pxe_service/create_pxe_service.ts +++ b/yarn-project/pxe/src/pxe_service/create_pxe_service.ts @@ -6,6 +6,7 @@ import { initStoreForRollup } from '@aztec/kv-store/utils'; import { getCanonicalClassRegisterer } from '@aztec/protocol-contracts/class-registerer'; import { getCanonicalGasToken } from '@aztec/protocol-contracts/gas-token'; import { getCanonicalInstanceDeployer } from '@aztec/protocol-contracts/instance-deployer'; +import { getCanonicalKeyRegistry } from '@aztec/protocol-contracts/key-registry'; import { getCanonicalMultiCallEntrypointContract } from '@aztec/protocol-contracts/multi-call-entrypoint'; import { join } from 'path'; @@ -47,6 +48,7 @@ export async function createPXEService( getCanonicalInstanceDeployer(), getCanonicalMultiCallEntrypointContract(), getCanonicalGasToken(l1Contracts.gasPortalAddress), + getCanonicalKeyRegistry(), ]) { await server.registerContract(contract); } From e9504333dcb25c3f9bd1344743a0e12e7719ab2e Mon Sep 17 00:00:00 2001 From: just-mitch <68168980+just-mitch@users.noreply.github.com> Date: Thu, 25 Apr 2024 11:45:12 -0600 Subject: [PATCH 030/201] chore: run noir projects tests in earthly (#6024) noir project tests are now run as part of github actions as earthly targets --- .github/workflows/ci.yml | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fa73e81326b..f575342f2fd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -100,6 +100,22 @@ jobs: # limit our parallelism to half our cores run: earthly --no-output +test --hardware_concurrency=64 + noir-projects: + 
needs: setup + runs-on: ${{ inputs.username || github.actor }}-x86 + steps: + - { + uses: actions/checkout@v4, + with: { ref: "${{ github.event.pull_request.head.sha }}" }, + } + - uses: ./.github/ci-setup-action + with: + dockerhub_password: "${{ secrets.DOCKERHUB_PASSWORD }}" + concurrency_key: noir-projects-${{ inputs.username || github.actor }}-x86 + - name: "Noir Projects" + timeout-minutes: 25 + run: earthly --no-output ./noir-projects/+test + yarn-project-formatting: needs: setup runs-on: ${{ github.actor }}-x86 @@ -118,7 +134,7 @@ jobs: run: earthly --no-output ./yarn-project/+format-check yarn-project-test: - needs: setup + needs: noir-projects runs-on: ${{ github.actor }}-x86 steps: - { From d51c8b8698187b4a69aadf1ce47f1565d71d2827 Mon Sep 17 00:00:00 2001 From: ludamad Date: Thu, 25 Apr 2024 14:34:20 -0500 Subject: [PATCH 031/201] chore: bundle spot runner + target more spot types (#6012) We had a really bad placement score with our single instance type. Merely targeting more instance types brings it from 1 to a 9. --- .github/spot-runner-action/.gitignore | 134 + .github/spot-runner-action/LICENSE | 201 + .github/spot-runner-action/README.md | 193 + .github/spot-runner-action/action.yaml | 75 + .github/spot-runner-action/dist/index.js | 95099 +++++++++++++++++ .github/spot-runner-action/package-lock.json | 3092 + .github/spot-runner-action/package.json | 44 + .github/spot-runner-action/src/config.ts | 90 + .github/spot-runner-action/src/ec2.ts | 411 + .github/spot-runner-action/src/github.ts | 152 + .github/spot-runner-action/src/main.ts | 172 + .github/spot-runner-action/src/userdata.ts | 76 + .github/spot-runner-action/src/utils.ts | 27 + .github/spot-runner-action/tsconfig.json | 15 + .github/workflows/ci.yml | 5 +- .github/workflows/setup-runner.yml | 9 +- 16 files changed, 99792 insertions(+), 3 deletions(-) create mode 100644 .github/spot-runner-action/.gitignore create mode 100644 .github/spot-runner-action/LICENSE create mode 100644 .github/spot-runner-action/README.md create mode 100644 .github/spot-runner-action/action.yaml create mode 100644 .github/spot-runner-action/dist/index.js create mode 100644 .github/spot-runner-action/package-lock.json create mode 100644 .github/spot-runner-action/package.json create mode 100644 .github/spot-runner-action/src/config.ts create mode 100644 .github/spot-runner-action/src/ec2.ts create mode 100644 .github/spot-runner-action/src/github.ts create mode 100644 .github/spot-runner-action/src/main.ts create mode 100644 .github/spot-runner-action/src/userdata.ts create mode 100644 .github/spot-runner-action/src/utils.ts create mode 100644 .github/spot-runner-action/tsconfig.json diff --git a/.github/spot-runner-action/.gitignore b/.github/spot-runner-action/.gitignore new file mode 100644 index 00000000000..486c2c13a8d --- /dev/null +++ b/.github/spot-runner-action/.gitignore @@ -0,0 +1,134 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript 
+ +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +lib + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp +.cache + +# Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* +mockConfig.ts + +# intellij +.idea \ No newline at end of file diff --git a/.github/spot-runner-action/LICENSE b/.github/spot-runner-action/LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/.github/spot-runner-action/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/.github/spot-runner-action/README.md b/.github/spot-runner-action/README.md new file mode 100644 index 00000000000..b43b3b00a81 --- /dev/null +++ b/.github/spot-runner-action/README.md @@ -0,0 +1,193 @@ +# EC2 Github Action Builder + +## TL;DR +Jump to [examples](#example-usage) + +## Overview + +This is a custom GitHub action to provision and manage self-hosted runners using AWS EC2 On-Demand and/or Spot instances. + +It offers multiple spot instance provisioning modes: + +- **None:** (default) Strictly On-Demand instances only +- **SpotOnly**: Strictly Spot instances only +- **BestEffort**: Use a Spot instance of same class and size when price is <= On-Demand + - (Automatic fallback to On-Demand) + +Supported operating system AMIs: +- Amazon Linux +- Ubuntu +- Debian + +## Why? 
+
+### Cost Savings
+Per-minute rates (USD) by operating system and vCPU count:
+```text
+OS vCPU GH Price/Minute EC2 Price/Minute
+Linux 2 $0.008 $0.001284 (c5a.large)
+Linux 4 $0.016 $0.00257 (c5a.xlarge)
+Linux 8 $0.032 $0.00514 (c5a.2xlarge)
+Linux 16 $0.064 $0.0114 (c5.4xlarge)
+Linux 32 $0.128 $0.02054 (c5a.8xlarge)
+Linux 64 $0.256 $0.041067 (c5a.16xlarge)
+```
+
+Sources:
+- [EC2 On-Demand Pricing](https://aws.amazon.com/ec2/pricing/on-demand/)
+- [GH Action Runner Pricing](https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#per-minute-rates)
+
+### Customizable Machine Image
+Users can provide their own custom AMI image pre-loaded with all the necessary tooling of their choice, saving time and cost.
+
+### Enhanced Security
+- EC2 instances run within your infrastructure
+- Easier to harden runner instances using custom AMIs, Security Groups, etc.
+- Easier monitoring and vulnerability scanning using existing tools (e.g. CloudWatch, GuardDuty, AWS Inspector)
+- Secure networking setup by eliminating any need to expose ports to external services or to use bastion hosts!
+- Lower data transfer costs (e.g. ECR images, S3 objects)
+
+## Setup
+
+### 1. Create GitHub Personal Access Token
+1. Create a [fine-grained personal access token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-fine-grained-personal-access-token)
+2. Edit the token permissions and select `Only select repositories` for `Repository access`
+3. Select any repositories you wish to use with this action
+4. Grant `Read and Write access` for the `Administration` access level under Repository permissions
+5. Add the token to GitHub Action secrets and note the secret name
+
+### 2. Set up GitHub Secrets for IAM credentials
+
+1. Add your `IAM Access Key ID` and `Secret Access Key` to GitHub Secrets and note the secret names!
+2. Modify `${{ secrets.DEPLOY_AWS_ACCESS_KEY_ID }}` and `${{ secrets.DEPLOY_AWS_SECRET_ACCESS_KEY }}` in the examples below to match the names of your GH secrets
+
+*Note*: For information about required IAM permissions check **IAM role policy** [here](./docs/CrossAccountIAM.md)
+
+### 3. Collect EC2 information:
+
+- `AWS Region` (e.g. `us-west-2`)
+- `EC2 AMI ID` for your desired instance type in the region ([Ubuntu AMI Locator](https://cloud-images.ubuntu.com/locator/ec2/))
+  - **Important Note:** Only Ubuntu, Amazon Linux and Debian AMIs have been tested
+  - To find AMIs **_for other operating systems follow instructions_** [here](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/finding-an-ami.html)
+- `EC2 Subnet ID` for any subnet with internet access (can be private with NAT)
+- `EC2 Security Group ID` for any security group which allows all outbound traffic (the default configuration for an empty Security Group)
+
+Note: The security group does not require any in-bound rules. You can add in-bound rules based on your needs (e.g. open SSH port 22)
+
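Before the examples, a minimal sketch of the `BestEffort` fallback described in the Overview. It assumes the `Ec2Instance` class from this action's `src/ec2.ts` and its `requestMachine(useOnDemand)` helper (visible in the bundled sources below), plus `@actions/core`; it is illustrative only, not the action's verbatim implementation (see "How it all works under the hood" for the documented behavior):

```ts
import * as core from "@actions/core";
import { Ec2Instance } from "./ec2";

// BestEffort: try a Spot launch first, and fall back to a plain On-Demand
// launch if the Spot request cannot be fulfilled.
async function launchBestEffort(ec2: Ec2Instance): Promise<string | undefined> {
  try {
    // requestMachine(false) asks the EC2 fleet for 100% Spot capacity
    return await ec2.requestMachine(false);
  } catch (err) {
    core.warning(`Spot launch failed, falling back to On-Demand: ${err}`);
    // requestMachine(true) asks for 100% On-Demand capacity instead
    return await ec2.requestMachine(true);
  }
}
```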

+## Example Usage

+ +### Standard + +- Modify `ec2_spot_instance_strategy` for other deployment strategies. List of all values can be found [here](action.yaml) +- Modify `github_token` value to match the name for your Personal Access Token secret name + +```yaml +jobs: + start-runner: + timeout-minutes: 5 # normally it only takes 1-2 minutes + name: Start self-hosted EC2 runner + runs-on: ubuntu-latest + permissions: + actions: write + steps: + - name: Start EC2 runner + id: start-ec2-runner + uses: NextChapterSoftware/ec2-action-builder@v1 + with: + github_token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }} + aws_access_key_id: ${{ secrets.DEPLOY_AWS_ACCESS_KEY_ID }} + aws_secret_access_key: ${{ secrets.DEPLOY_AWS_SECRET_ACCESS_KEY }} + aws_region: "us-west-2" + ec2_instance_type: c5.4xlarge + ec2_ami_id: ami-008fe2fc65df48dac + ec2_subnet_id: "SUBNET_ID_REDACTED" + ec2_security_group_id: "SECURITY_GROUP_ID_REDACTED" + ec2_instance_ttl: 40 # Optional (default is 60 minutes) + ec2_spot_instance_strategy: None # Other options are: SpotOnly, BestEffort + + # Job that runs on the self-hosted runner + run-build: + timeout-minutes: 1 + needs: + - start-runner + runs-on: ${{ github.run_id }} + steps: + - run: env +``` + + +### Advanced + +- IAM policy and role setup instructions can be found [here](docs/CrossAccountIAM.md) +- Modify `ec2_spot_instance_strategy` for other deployment strategies. List of all values can be found [here](action.yaml) + +```yaml +jobs: + start-runner: + timeout-minutes: 5 # normally it only takes 1-2 minutes + name: Start self-hosted EC2 runner + runs-on: ubuntu-latest + permissions: + actions: write + steps: + - name: Start EC2 runner + id: start-ec2-runner + uses: NextChapterSoftware/ec2-action-builder@v1 + with: + aws_access_key_id: ${{ secrets.DEPLOY_AWS_ACCESS_KEY_ID }} + aws_secret_access_key: ${{ secrets.DEPLOY_AWS_SECRET_ACCESS_KEY }} + aws_iam_role_arn: "arn:aws:iam::REDACTED:role/REDACTED" + aws_region: "us-west-2" + github_token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }} + github_action_runner_version: v2.300.2 # Optional (default is latest release) + ec2_instance_type: c5.4xlarge + ec2_ami_id: ami-008fe2fc65df48dac + ec2_subnet_id: "SUBNET_ID_REDACTED" + ec2_security_group_id: "SECURITY_GROUP_ID_REDACTED" + ec2_instance_ttl: 40 # Optional (default is 60 minutes) + ec2_spot_instance_strategy: BestEffort # Other options are: None, BestEffort + ec2_instance_tags: > # Required for IAM role resource permission scoping + [ + {"Key": "Owner", "Value": "deploybot"} + ] + + # Job that runs on the self-hosted runner + run-build: + timeout-minutes: 1 + needs: + - start-runner + runs-on: ${{ github.run_id }} + steps: + - run: env +``` +## How it all works under the hood + +### General instance launch flow +- Your GitHub personal token is used to obtain a Runner Registration token +- If no explicit runner version has been provided, it will retrieve the latest version number +- It then uses all the provided info to compile an EC2 user-data script which does the following: + - Set a max TTL on the EC2 instance on startup + - Create a shutdown script which is executed when jobs end + - Downloads GitHub Action Runner bundle + - Unpack Action Runner bundle + - Configure Runner agent as an **ephemeral** agent +- EC2 instance is launched with the user-data script from previous step +- Once EC2 boot has completed, user-data script is executed +- Runner binary registers itself with GitHub API using the current job ID +- Once the Runner is registered, control is transferred to the next job (this is your 
build job) +- Upon a job completion (failure/success), Shutdown script is triggered to kill the instance with a 1 minute delay + +### Spot instance provisioning +- Script looks up On-Demand price for the supplied instance type +- It will then look up EC2 Spot instance prices using AWS API +- Depending on the mode + - SpotOnly: It will try to launch a spot instance with On-Demand price as the max price cut-off + - BestEffort: It will try to launch a spot instance but falls back to On-Demand if prices are too high! + +## Other EC2 Considerations +- Each instance is named as "{repo}-{jobID}" +- Default EC2 TTL is 60 minutes +- Other EC2 tags are `github_job_id` and `github_ref` +- Spot instances might be taken away by AWS without any prior notice \ No newline at end of file diff --git a/.github/spot-runner-action/action.yaml b/.github/spot-runner-action/action.yaml new file mode 100644 index 00000000000..a0290b63b8a --- /dev/null +++ b/.github/spot-runner-action/action.yaml @@ -0,0 +1,75 @@ +name: 'Deploy self-hosted runners to EC2 (Spot/On-Demand)' +description: 'Automatically deploy self-hosted runners to AWS EC2 using Spot/On-Demand instances and optimize for best performance/cost.' +branding: + icon: 'server' + color: 'orange' +inputs: + aws_access_key_id: + description: 'AWS access key ID' + required: true + aws_secret_access_key: + description: 'AWS secret access key' + required: true + aws_region: + description: 'AWS Region' + required: true + aws_iam_role_arn: + description: 'ARN of IAM role to assume' + required: false + default: '' + github_token: + description: 'GitHub token' + required: true + runner_label: + description: 'Key for when to recreate spot, use as runs-on' + required: true + runner_concurrency: + description: 'How many runners to create' + type: number + required: true + subaction: + description: 'One of: start (default), stop, restart' + required: false + default: 'start' + github_action_runner_version: + description: 'GitHub action runner version' + required: false + ec2_instance_type: + description: 'Ec2 instance type' + required: true + ec2_ami_id: + description: 'Ec2 ami ID' + required: true + ec2_instance_iam_role: + description: 'IAM role for to associate with ec2 instance' + required: false + default: '' + ec2_instance_tags: + description: 'List of extra aws resource tags for ec2 instance' + required: false + default: '{}' + ec2_instance_ttl: + description: 'Maximum duration an EC2 instance is allowed to live in minute' + required: false + default: '60' + ec2_security_group_id: + description: 'Security group id' + required: true + ec2_subnet_id: + description: 'Subnet id' + required: true + ec2_key_name: + description: 'EC2 keypair name' + required: true + ec2_spot_instance_strategy: + description: > + Spot instance scheduling modes: + None -> (default) no spot instances, use on-demand + SpotOnly -> Strictly spot instances only + BestEffort -> Use spot instances whenever they are cheaper than on-demand + + required: false + default: "none" +runs: + using: 'node20' + main: 'dist/index.js' \ No newline at end of file diff --git a/.github/spot-runner-action/dist/index.js b/.github/spot-runner-action/dist/index.js new file mode 100644 index 00000000000..bcd892e70c3 --- /dev/null +++ b/.github/spot-runner-action/dist/index.js @@ -0,0 +1,95099 @@ +/******/ (() => { // webpackBootstrap +/******/ var __webpack_modules__ = ({ + +/***/ 20088: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && 
this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ActionConfig = void 0; +const core = __importStar(__nccwpck_require__(42186)); +const github = __importStar(__nccwpck_require__(95438)); +class ActionConfig { + constructor() { + // AWS account and credentials params + this.awsAccessKeyId = core.getInput("aws_access_key_id"); + this.awsSecretAccessKey = core.getInput("aws_secret_access_key"); + this.awsRegion = core.getInput("aws_region"); + this.awsIamRoleArn = core.getInput("aws_iam_role_arn"); + this.awsAssumeRole = this.awsIamRoleArn ? true : false; + // Github params + this.githubToken = core.getInput("github_token"); + this.githubJobId = core.getInput("runner_label"); + this.githubRef = github.context.ref; + this.githubRepo = github.context.repo.repo; + this.githubActionRunnerVersion = core.getInput("github_action_runner_version"); + this.githubActionRunnerLabel = this.githubJobId; + this.subaction = core.getInput("subaction"); + this.githubActionRunnerConcurrency = +core.getInput("runner_concurrency"); + // Ec2 params + this.ec2InstanceType = core.getInput("ec2_instance_type").split(" "); + this.ec2AmiId = core.getInput("ec2_ami_id"); + this.ec2InstanceIamRole = core.getInput("ec2_instance_iam_role"); + this.ec2InstanceTags = core.getInput("ec2_instance_tags"); + this.ec2InstanceTtl = core.getInput("ec2_instance_ttl"); + this.ec2SubnetId = core.getInput("ec2_subnet_id"); + this.ec2KeyName = core.getInput("ec2_key_name"); + this.ec2SecurityGroupId = core.getInput("ec2_security_group_id"); + this.ec2SpotInstanceStrategy = core + .getInput("ec2_spot_instance_strategy") + .toLowerCase(); + } +} +exports.ActionConfig = ActionConfig; + + +/***/ }), + +/***/ 32695: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Ec2Instance = void 0; +const aws_sdk_1 = __importDefault(__nccwpck_require__(71786)); +const crypto = __importStar(__nccwpck_require__(6113)); +const core = __importStar(__nccwpck_require__(42186)); +const userdata_1 = __nccwpck_require__(77519); +class Ec2Instance { + constructor(config) { + this.assumedRole = false; + this.config = config; + this.credentials = new aws_sdk_1.default.Credentials({ + accessKeyId: this.config.awsAccessKeyId, + secretAccessKey: this.config.awsSecretAccessKey, + }); + this.client = new aws_sdk_1.default.EC2({ + credentials: this.credentials, + region: this.config.awsRegion, + }); + this.tags = this.getTags(); + } + getEc2Client() { + return __awaiter(this, void 0, void 0, function* () { + if (!this.assumedRole && this.config.awsAssumeRole) { + this.assumedRole = !this.assumedRole; + const credentials = yield this.getCrossAccountCredentials(); + this.client = new aws_sdk_1.default.EC2({ + credentials: credentials, + region: this.config.awsRegion, + }); + } + return this.client; + }); + } + getTags() { + // Parse custom tags + let customTags = []; + if (this.config.ec2InstanceTags) { + customTags = JSON.parse(this.config.ec2InstanceTags); + } + return [ + { + Key: "Name", + Value: `${this.config.githubRepo}-${this.config.githubJobId}`, + }, + { + Key: "github_ref", + Value: this.config.githubRef, + }, + { + Key: "owner", + Value: "EC2_ACTION_BUILDER", + }, + { + Key: "github_job_id", + Value: this.config.githubJobId, + }, + { + Key: "github_repo", + Value: this.config.githubRepo, + }, + ...customTags, + ]; + } + getCrossAccountCredentials() { + return __awaiter(this, void 0, void 0, function* () { + const stsClient = new aws_sdk_1.default.STS({ + credentials: this.credentials, + region: this.config.awsRegion, + }); + const timestamp = new Date().getTime(); + const params = { + RoleArn: this.config.awsIamRoleArn, + RoleSessionName: `ec2-action-builder-${this.config.githubJobId}-${timestamp}`, + }; + try { + const data = yield stsClient.assumeRole(params).promise(); + if (data.Credentials) + return { + accessKeyId: data.Credentials.AccessKeyId, + secretAccessKey: data.Credentials.SecretAccessKey, + sessionToken: data.Credentials.SessionToken, + }; + 
core.error(`STS returned empty response`); + throw Error("STS returned empty response"); + } + catch (error) { + core.error(`STS assume role failed`); + throw error; + } + }); + } + // async runInstances(params: RunInstancesRequest) { + // const client = await this.getEc2Client(); + // try { + // return (await client.runInstances(params).promise()).Instances; + // } catch (error) { + // core.error(`Failed to create instance(s)`); + // throw error; + // } + // } + getSubnetAzId() { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const client = yield this.getEc2Client(); + try { + const subnets = (yield client + .describeSubnets({ + SubnetIds: [this.config.ec2SubnetId], + }) + .promise()).Subnets; + return (_a = subnets === null || subnets === void 0 ? void 0 : subnets.at(0)) === null || _a === void 0 ? void 0 : _a.AvailabilityZoneId; + } + catch (error) { + core.error(`Failed to lookup subnet az`); + throw error; + } + }); + } + getSubnetAz() { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const client = yield this.getEc2Client(); + try { + const subnets = (yield client + .describeSubnets({ + SubnetIds: [this.config.ec2SubnetId], + }) + .promise()).Subnets; + return (_a = subnets === null || subnets === void 0 ? void 0 : subnets.at(0)) === null || _a === void 0 ? void 0 : _a.AvailabilityZone; + } + catch (error) { + core.error(`Failed to lookup subnet az`); + throw error; + } + }); + } + getHashOfStringArray(strings) { + const hash = crypto.createHash("sha256"); + hash.update(strings.join("")); // Concatenate all strings in the array + return hash.digest("hex"); + } + getLaunchTemplate() { + return __awaiter(this, void 0, void 0, function* () { + const client = yield this.getEc2Client(); + // NOTE: This should be deterministic or we will create a launch template each time + const userData = yield new userdata_1.UserData(this.config).getUserData(); + const ec2InstanceTypeHash = this.getHashOfStringArray(this.config.ec2InstanceType.concat([userData]).concat([JSON.stringify(this.tags)])); + const launchTemplateName = "aztec-packages-spot-" + this.config.ec2AmiId + "-" + ec2InstanceTypeHash; + const launchTemplateParams = { + LaunchTemplateName: launchTemplateName, + LaunchTemplateData: { + ImageId: this.config.ec2AmiId, + InstanceRequirements: { + // We do not know what the instance types correspond to + // just let the user send a list of allowed instance types + VCpuCount: { Min: 0 }, + MemoryMiB: { Min: 0 }, + AllowedInstanceTypes: this.config.ec2InstanceType, + }, + UserData: userData, + TagSpecifications: [ + { + ResourceType: "instance", + Tags: this.tags, + }, + ], + BlockDeviceMappings: [ + { + DeviceName: "/dev/sda1", + Ebs: { + VolumeSize: 32, + }, + }, + ], + }, + }; + let arr = []; + try { + arr = (yield client + .describeLaunchTemplates({ + LaunchTemplateNames: [launchTemplateName], + }) + .promise()).LaunchTemplates || []; + } + catch (err) { + core.info("Launch templates describe error, note this will be likely resolved by creating the template in the next step: " + err); + } + core.info("Launch templates found: " + JSON.stringify(arr, null, 2)); + if (arr.length <= 0) { + core.info("Creating launch template: " + launchTemplateName); + yield client.createLaunchTemplate(launchTemplateParams).promise(); + } + return launchTemplateName; + }); + } + requestMachine(useOnDemand) { + return __awaiter(this, void 0, void 0, function* () { + // Note advice re max bid: "If you specify a maximum price, your instances will be interrupted more 
frequently than if you do not specify this parameter." + const availabilityZone = yield this.getSubnetAz(); + const fleetLaunchConfig = { + LaunchTemplateSpecification: { + Version: "$Latest", + LaunchTemplateName: yield this.getLaunchTemplate(), + }, + Overrides: this.config.ec2InstanceType.map((instanceType) => ({ + InstanceType: instanceType, + AvailabilityZone: availabilityZone, + SubnetId: this.config.ec2SubnetId, + })), + }; + const createFleetRequest = { + Type: "instant", + LaunchTemplateConfigs: [fleetLaunchConfig], + TargetCapacitySpecification: { + TotalTargetCapacity: 1, + OnDemandTargetCapacity: useOnDemand ? 1 : 0, + SpotTargetCapacity: useOnDemand ? 0 : 1, + DefaultTargetCapacityType: useOnDemand ? "on-demand" : "spot", + }, + }; + // const config: SpotFleetRequestConfigData = { + // IamFleetRole: + // "arn:aws:iam::278380418400:role/aws-ec2-spot-fleet-tagging-role", + // TargetCapacity: 1, + // // We always ask for 1 instance, but might ask for 100% on demand or spot + // OnDemandTargetCapacity: useOnDemand ? 1 : 0, + // TerminateInstancesWithExpiration: true, + // Type: "request", + // LaunchSpecifications: + // }; + // const params: RequestSpotFleetRequest = { + // SpotFleetRequestConfig: config, + // }; + const client = yield this.getEc2Client(); + const fleet = yield client.createFleet(createFleetRequest).promise(); + const instances = ((fleet === null || fleet === void 0 ? void 0 : fleet.Instances) || [])[0] || {}; + return (instances.InstanceIds || [])[0]; + }); + } + // async getOnDemandInstanceConfiguration( + // ec2SpotInstanceStrategy: string + // ): Promise { + // const userData = new UserData(this.config); + // const params: RunInstancesRequest = { + // ImageId: this.config.ec2AmiId, + // InstanceInitiatedShutdownBehavior: "terminate", + // InstanceMarketOptions: {}, + // InstanceType: "", + // MaxCount: 1, + // MinCount: 1, + // SecurityGroupIds: [this.config.ec2SecurityGroupId], + // SubnetId: this.config.ec2SubnetId, + // KeyName: this.config.ec2KeyName, + // Placement: { + // AvailabilityZone: await this.getSubnetAz(), + // }, + // TagSpecifications: [ + // { + // ResourceType: "instance", + // Tags: this.tags, + // }, + // ], + // // parity with build-system + // BlockDeviceMappings: [ + // { + // DeviceName: "/dev/sda1", + // Ebs: { + // VolumeSize: 32, + // }, + // }, + // ], + // // parity with build-system + // UserData: await userData.getUserData(), + // }; + // switch (ec2SpotInstanceStrategy.toLowerCase()) { + // case "besteffort": + // case "spotonly": { + // params.InstanceMarketOptions = { + // MarketType: "spot", + // SpotOptions: { + // InstanceInterruptionBehavior: "terminate", + // SpotInstanceType: "one-time", + // }, + // }; + // break; + // } + // case "none": { + // params.InstanceMarketOptions = {}; + // break; + // } + // default: { + // throw new TypeError("Invalid value for ec2_spot_instance_strategy"); + // } + // } + // return params; + // } + getInstanceStatus(instanceId) { + return __awaiter(this, void 0, void 0, function* () { + const client = yield this.getEc2Client(); + try { + const instanceList = (yield client + .describeInstanceStatus({ InstanceIds: [instanceId] }) + .promise()).InstanceStatuses; + return instanceList === null || instanceList === void 0 ? 
void 0 : instanceList.at(0); + } + catch (error) { + core.error(`Failed to lookup status for instance ${instanceId}`); + throw error; + } + }); + } + getInstancesForTags() { + return __awaiter(this, void 0, void 0, function* () { + const client = yield this.getEc2Client(); + const filters = [ + { + Name: "tag:Name", + Values: [`${this.config.githubRepo}-${this.config.githubJobId}`], + }, + ]; + try { + var params = { + Filters: filters, + MaxResults: 99, + }; + let instances = []; + for (const reservation of (yield client.describeInstances(params).promise()).Reservations || []) { + instances = instances.concat(reservation.Instances || []); + } + return instances; + } + catch (error) { + core.error(`Failed to lookup status for instance for tags ${JSON.stringify(filters, null, 2)}`); + throw error; + } + }); + } + waitForInstanceRunningStatus(instanceId) { + return __awaiter(this, void 0, void 0, function* () { + const client = yield this.getEc2Client(); + try { + yield client + .waitFor("instanceRunning", { InstanceIds: [instanceId] }) + .promise(); + core.info(`AWS EC2 instance ${instanceId} is up and running`); + return; + } + catch (error) { + core.error(`AWS EC2 instance ${instanceId} init error`); + throw error; + } + }); + } + terminateInstances(instanceIds) { + return __awaiter(this, void 0, void 0, function* () { + if (instanceIds.length === 0) { + return; + } + const client = yield this.getEc2Client(); + try { + yield client.terminateInstances({ InstanceIds: instanceIds }).promise(); + core.info(`AWS EC2 instances ${instanceIds.join(", ")} are terminated`); + return; + } + catch (error) { + core.info(`Failed to terminate instances ${instanceIds.join(", ")}`); + throw error; + } + }); + } +} +exports.Ec2Instance = Ec2Instance; + + +/***/ }), + +/***/ 85928: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GithubClient = void 0; +const github = __importStar(__nccwpck_require__(95438)); +const http_client_1 = __nccwpck_require__(96255); +const _ = __importStar(__nccwpck_require__(90250)); +const core = __importStar(__nccwpck_require__(42186)); +class GithubClient { + constructor(config) { + this.config = config; + } + getRunnerVersion() { + return __awaiter(this, void 0, void 0, function* () { + if (this.config.githubActionRunnerVersion) + return this.config.githubActionRunnerVersion.replace("v", ""); + const httpClient = new http_client_1.HttpClient("http-client"); + const res = yield httpClient.get("https://api.github.com/repos/actions/runner/releases/latest"); + const body = yield res.readBody(); + const obj = JSON.parse(body); + // aztec: workaround random flake in this api seen in ci + return (obj["tag_name"] || "v2.315.0").replace("v", ""); + }); + } + getAllRunners() { + return __awaiter(this, void 0, void 0, function* () { + const octokit = github.getOctokit(this.config.githubToken); + try { + let page = 1; + const per_page = 100; + let response = yield octokit.rest.actions.listSelfHostedRunnersForRepo({ + owner: github.context.repo.owner, + repo: github.context.repo.repo, + per_page, + page, + }); + let allRunners = response.data.runners; + let totalCount = response.data.total_count; + while (allRunners.length < totalCount) { + page++; + response = yield octokit.rest.actions.listSelfHostedRunnersForRepo({ + owner: github.context.repo.owner, + repo: github.context.repo.repo, + per_page, + page, + }); + totalCount = response.data.total_count; + allRunners = allRunners.concat(response.data.runners); + } + return allRunners; + } + catch (error) { + core.error(`Failed to list github runners: ${error}`); + throw error; + } + }); + } + getRunnersWithLabels(labels) { + return __awaiter(this, void 0, void 0, function* () { + const searchLabels = { + labels: labels.map(function (label) { + return { name: label }; + }), + }; + return _.filter(yield this.getAllRunners(), searchLabels); + }); + } + getRunnerRegistrationToken() { + return __awaiter(this, void 0, void 0, function* () { + const octokit = github.getOctokit(this.config.githubToken); + try { + const response = yield octokit.rest.actions.createRegistrationTokenForRepo({ + owner: github.context.repo.owner, + repo: github.context.repo.repo, + }); + return response.data; + } + catch (error) { + core.error(`Failed to get Runner registration token: ${error}`); + throw error; + } + }); + } + removeRunnersWithLabels(labels) { + return __awaiter(this, void 0, void 0, function* () { + let deletedAll = true; + try { + const runners = yield this.getRunnersWithLabels(labels); + console.log("Found existing runners:", runners.map((r) => r.name)); + const octokit = github.getOctokit(this.config.githubToken); + for (const runner of runners) { + const response = yield octokit.rest.actions.deleteSelfHostedRunnerFromRepo({ + owner: github.context.repo.owner, + repo: github.context.repo.repo, + runner_id: runner.id, + }); + deletedAll = deletedAll && response.status == 204; + } + } + catch (error) { + core.error(`Failed to delete runner: ${error}`); + } + return deletedAll; + }); + } + hasRunner(labels) { + return __awaiter(this, void 0, void 0, function* () { + for (const runner of yield this.getRunnersWithLabels(labels)) { + if 
(runner.status === "online") { + core.info(`GitHub self-hosted runner ${runner.name} with label ${labels} is ready to use. Continuing assuming other runners are online.`); + return true; + } + } + return false; + }); + } + // Borrowed from https://github.com/machulav/ec2-github-runner/blob/main/src/aws.js + pollForRunnerCreation(labels) { + return __awaiter(this, void 0, void 0, function* () { + const timeoutMinutes = 5; + const retryIntervalSeconds = 10; + const quietPeriodSeconds = 30; + let waitSeconds = 0; + core.info(`Waiting ${quietPeriodSeconds}s before polling for runners`); + yield new Promise((r) => setTimeout(r, quietPeriodSeconds * 1000)); + core.info(`Polling for runners every ${retryIntervalSeconds}s`); + return new Promise((resolve, reject) => { + const interval = setInterval(() => __awaiter(this, void 0, void 0, function* () { + if (waitSeconds > timeoutMinutes * 60) { + core.error("GitHub self-hosted runner creation error"); + clearInterval(interval); + reject(`A timeout of ${timeoutMinutes} minutes is exceeded. Please ensure your EC2 instance has access to the Internet.`); + } + if (yield this.hasRunner(labels)) { + clearInterval(interval); + // Resolve so the awaiting caller can proceed; otherwise this promise never settles on success. + resolve(); + return; + } + waitSeconds += retryIntervalSeconds; + core.info("Waiting for runners..."); + }), retryIntervalSeconds * 1000); + }); + }); + } +} +exports.GithubClient = GithubClient; + + +/***/ }), + +/***/ 3109: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(42186)); +const config_1 = __nccwpck_require__(20088); +const ec2_1 = __nccwpck_require__(32695); +const github_1 = __nccwpck_require__(85928); +const utils_1 = __nccwpck_require__(50918); +function pollSpotStatus(config, ec2Client, ghClient) { + return __awaiter(this, void 0, void 0, function* () { + // 12 iters x 10000 ms = 2 minutes + for (let iter = 0; iter < 12; iter++) { + const instances = yield ec2Client.getInstancesForTags(); + const hasInstance = instances.filter((i) => { var _a; return ((_a = i.State) === null || _a === void 0 ? void 0 : _a.Name) === "running"; }).length > 0; + if (!hasInstance) { + // we need to start an instance + return "none"; + } + try { + core.info("Found ec2 instance, looking for runners."); + if (yield ghClient.hasRunner([config.githubJobId])) { + // we have runners + return "usable"; + } + } + catch (err) { } + // wait 10 seconds + yield new Promise((r) => setTimeout(r, 10000)); + } + // we have a bad state for a while, error + core.warning("Looped for 2 minutes and could only find spot with no runners!"); + return "unusable"; + }); +} +function start() { + return __awaiter(this, void 0, void 0, function* () { + const config = new config_1.ActionConfig(); + if (config.subaction === "stop") { + yield stop(); + return; + } + else if (config.subaction === "restart") { + yield stop(); + // then we make a fresh instance + } + else if (config.subaction !== "start") { + throw new Error("Unexpected subaction: " + config.subaction); + } + // subaction is 'start' or 'restart' + const ec2Client = new ec2_1.Ec2Instance(config); + const ghClient = new github_1.GithubClient(config); + const spotStatus = yield pollSpotStatus(config, ec2Client, ghClient); + if (spotStatus === "usable") { + core.info(`Runner already running. Continuing as we can target it with jobs.`); + return; + } + if (spotStatus === "unusable") { + core.warning("Taking down spot as it has no runners! If we were mistaken, this could impact existing jobs."); + if (config.subaction === "restart") { + throw new Error("Taking down spot we just started. This seems wrong, erroring out."); + } + yield stop(); + } + var ec2SpotStrategies; + switch (config.ec2SpotInstanceStrategy) { + case "besteffort": { + ec2SpotStrategies = ["BestEffort", "none"]; + core.info("Ec2 spot instance strategy is set to 'BestEffort' with 'None' as fallback"); + break; + } + default: { + ec2SpotStrategies = [config.ec2SpotInstanceStrategy]; + core.info(`Ec2 spot instance strategy is set to ${config.ec2SpotInstanceStrategy}`); + } + } + var instanceId = ""; + for (const ec2Strategy of ec2SpotStrategies) { + core.info(`Starting instance with ${ec2Strategy} strategy`); + // 6 * 10000ms = 1 minute per strategy + // TODO make longer lived spot request? + for (let i = 0; i < 6; i++) { + try { + // Start instance + instanceId = (yield ec2Client.requestMachine( + // we fallback to on-demand + ec2Strategy.toLocaleLowerCase() === "none")) || ""; + if (instanceId) { + break; + } + // let's exit, only loop on InsufficientInstanceCapacity + break; + } + catch (error) { + // TODO is this still the relevant error? + if ((error === null || error === void 0 ? 
void 0 : error.code) && + error.code === "InsufficientInstanceCapacity" && + ec2SpotStrategies.length > 0 && + ec2Strategy.toLocaleLowerCase() != "none") { + core.info("Failed to create instance due to 'InsufficientInstanceCapacity', waiting 10 seconds and trying again."); + // we loop after 10 seconds + } + else { + throw error; + } + } + // wait 10 seconds + yield new Promise((r) => setTimeout(r, 10000)); + } + if (instanceId) { + core.info("Successfully requested instance with ID " + instanceId); + break; + } + } + if (instanceId) + yield ec2Client.waitForInstanceRunningStatus(instanceId); + else { + core.error("Failed to get ID of running instance"); + throw Error("Failed to get ID of running instance"); + } + if (instanceId) + yield ghClient.pollForRunnerCreation([config.githubJobId]); + else { + core.error("Instance failed to register with Github Actions"); + throw Error("Instance failed to register with Github Actions"); + } + }); +} +function stop() { + return __awaiter(this, void 0, void 0, function* () { + try { + core.info("Starting instance cleanup"); + const config = new config_1.ActionConfig(); + const ec2Client = new ec2_1.Ec2Instance(config); + const ghClient = new github_1.GithubClient(config); + const instances = yield ec2Client.getInstancesForTags(); + yield ec2Client.terminateInstances(instances.map((i) => i.InstanceId)); + core.info("Clearing previously installed runners"); + const result = yield ghClient.removeRunnersWithLabels([config.githubJobId]); + if (result) { + core.info("Finished runner cleanup"); + } + else { + throw Error("Failed to cleanup runners. Continuing, but failure expected!"); + } + } + catch (error) { + core.info(error); + } + }); +} +(function () { + return __awaiter(this, void 0, void 0, function* () { + try { + // Await so a rejected start() actually reaches the catch below. + yield start(); + } + catch (error) { + yield stop(); + (0, utils_1.assertIsError)(error); + core.error(error); + core.setFailed(error.message); + } + }); +})(); + + +/***/ }), + +/***/ 77519: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.UserData = void 0; +const github = __importStar(__nccwpck_require__(95438)); +const github_1 = __nccwpck_require__(85928); +class UserData { + constructor(config) { + this.config = config; + } + getUserData() { + return __awaiter(this, void 0, void 0, function* () { + const ghClient = new github_1.GithubClient(this.config); + const githubActionRunnerVersion = yield ghClient.getRunnerVersion(); + // Retrieve runner registration tokens in parallel + const tokens = yield Promise.all(Array.from({ length: this.config.githubActionRunnerConcurrency }, () => ghClient.getRunnerRegistrationToken())); + if (!this.config.githubActionRunnerLabel) + throw Error("failed to obtain job ID for label"); + const runnerNameBase = `${this.config.githubJobId}-ec2`; + // space-separated registration tokens + const tokensSpaceSep = tokens.map((t) => t.token).join(" "); + const bumpShutdown = `shutdown -c ; shutdown -P +${this.config.ec2InstanceTtl}`; + // Note, we dont make the runner ephemeral as we start fresh runners as needed + // and delay shutdowns whenever jobs start + // TODO could deregister runners right before shutdown starts + const cmds = [ + "#!/bin/bash", + `exec 1>/run/log.out 2>&1`, + `shutdown -P +${this.config.ec2InstanceTtl}`, + "cd /run", + `mkdir -p shutdown-refcount`, + // Shutdown rules: + // - github actions job starts and ends always bump +ec2InstanceTtl minutes + // - when the amount of started jobs (start_run_* files) equal the amount of finished jobs (end_run_* files), we shutdown in 5 minutes + `echo "${bumpShutdown}; touch /run/shutdown-refcount/start_run_\\$(date +%s)_\\$RANDOM" > /run/delay_shutdown.sh`, + // `echo "[ \\$(find /run/shutdown-refcount/ -name 'start_run_*' | wc -l) -eq \\$(find /run/shutdown-refcount/ -name 'end_run_*' | wc -l) ] && shutdown -P 5 ; true" > /run/if_refcount0_shutdown.sh`, + `echo "echo refcounting disabled for now" > /run/if_refcount0_shutdown.sh`, + `echo "${bumpShutdown}; touch /run/shutdown-refcount/end_run_\\$(date +%s)_\\$RANDOM ; /run/if_refcount0_shutdown.sh " > /run/refcount_and_delay_shutdown.sh`, + `echo "flock /run/refcount-lock /run/delay_shutdown.sh" > /run/safe_delay_shutdown.sh`, + `echo "flock /run/refcount-lock /run/refcount_and_delay_shutdown.sh" > /run/safe_refcount_and_delay_shutdown.sh`, + "chmod +x /run/delay_shutdown.sh", + "chmod +x /run/refcount_and_delay_shutdown.sh", + "chmod +x /run/if_refcount0_shutdown.sh", + "chmod +x /run/safe_refcount_and_delay_shutdown.sh", + // All five generated scripts need exec permission, including the job-started hook. + "chmod +x /run/safe_delay_shutdown.sh", + "export ACTIONS_RUNNER_HOOK_JOB_STARTED=/run/safe_delay_shutdown.sh", + "export ACTIONS_RUNNER_HOOK_JOB_COMPLETED=/run/safe_refcount_and_delay_shutdown.sh", + "mkdir -p actions-runner && cd actions-runner", + 'echo "ACTIONS_RUNNER_HOOK_JOB_STARTED=/run/safe_delay_shutdown.sh" > .env', + // Append so the second hook variable does not overwrite the first. + 'echo "ACTIONS_RUNNER_HOOK_JOB_COMPLETED=/run/safe_refcount_and_delay_shutdown.sh" >> .env', + `GH_RUNNER_VERSION=${githubActionRunnerVersion}`, + 'case $(uname 
-m) in aarch64) ARCH="arm64" ;; amd64|x86_64) ARCH="x64" ;; esac && export RUNNER_ARCH=${ARCH}', + "curl -O -L https://github.com/actions/runner/releases/download/v${GH_RUNNER_VERSION}/actions-runner-linux-${RUNNER_ARCH}-${GH_RUNNER_VERSION}.tar.gz", + "tar xzf ./actions-runner-linux-${RUNNER_ARCH}-${GH_RUNNER_VERSION}.tar.gz", + "export RUNNER_ALLOW_RUNASROOT=1", + "mv externals ..", + // Note sharing bin doesn't work due to using it as a folder, and we don't bother splitting up sharing bin + "rm ./actions-runner-linux-${RUNNER_ARCH}-${GH_RUNNER_VERSION}.tar.gz", + '[ -n "$(command -v yum)" ] && yum install libicu -y', + `TOKENS=(${tokensSpaceSep}) ; echo ${tokensSpaceSep} > /run/github-runner-tokens`, + `for i in {0..${this.config.githubActionRunnerConcurrency - 1}}; do`, + ` ( cp -r . ../${runnerNameBase}-$i && ln -s $(pwd)/../externals ../${runnerNameBase}-$i && cd ../${runnerNameBase}-$i && echo \${TOKENS[i]} > .runner-token && ./config.sh --unattended --url https://github.com/${github.context.repo.owner}/${github.context.repo.repo} --token \${TOKENS[i]} --labels ${this.config.githubActionRunnerLabel} --replace --name ${runnerNameBase}-$i ; ./run.sh ) &`, + "done", + "wait", // Wait for all background processes to finish + ]; + console.log("Sending: ", cmds.filter((x) => !x.startsWith("TOKENS")).join("\n")); + return Buffer.from(cmds.join("\n")).toString("base64"); + }); + } +} +exports.UserData = UserData; + + +/***/ }), + +/***/ 50918: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.assertIsError = exports.findValuesHelper = void 0; +function findValuesHelper(obj, key) { + let list = []; + if (!obj) + return list; + if (obj instanceof Array) { + for (var i in obj) { + list = list.concat(findValuesHelper(obj[i], key)); + } + return list; + } + if (obj[key]) + list.push(obj[key]); + if (typeof obj == "object" && obj !== null) { + let children = Object.keys(obj); + if (children.length > 0) { + for (let i = 0; i < children.length; i++) { + list = list.concat(findValuesHelper(obj[children[i]], key)); + } + } + } + return list; +} +exports.findValuesHelper = findValuesHelper; +function assertIsError(error) { + if (!(error instanceof Error)) { + throw error; + } +} +exports.assertIsError = assertIsError; + + +/***/ }), + +/***/ 87351: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__nccwpck_require__(22037)); +const utils_1 = __nccwpck_require__(5278); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 42186: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = __nccwpck_require__(87351); +const file_command_1 = __nccwpck_require__(717); +const utils_1 = __nccwpck_require__(5278); +const os = __importStar(__nccwpck_require__(22037)); +const path = __importStar(__nccwpck_require__(71017)); +const oidc_utils_1 = __nccwpck_require__(98041); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val)); + } + command_1.issueCommand('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueFileCommand('PATH', inputPath); + } + else { + command_1.issueCommand('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. 
+ * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value)); + } + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value)); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. 
+ * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value)); + } + command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value)); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(81327); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(81327); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(2981); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 717: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__nccwpck_require__(57147)); +const os = __importStar(__nccwpck_require__(22037)); +const uuid_1 = __nccwpck_require__(75840); +const utils_1 = __nccwpck_require__(5278); +function issueFileCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${uuid_1.v4()}`; + const convertedValue = utils_1.toCommandValue(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map + +/***/ }), + +/***/ 98041: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OidcClient = void 0; +const http_client_1 = __nccwpck_require__(96255); +const auth_1 = __nccwpck_require__(35526); +const core_1 = __nccwpck_require__(42186); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map + +/***/ }), + +/***/ 2981: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(71017)); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map + +/***/ }), + +/***/ 81327: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(22037); +const fs_1 = __nccwpck_require__(57147); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. 
Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ? writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 
'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map + +/***/ }), + +/***/ 5278: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 74087: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Context = void 0; +const fs_1 = __nccwpck_require__(57147); +const os_1 = __nccwpck_require__(22037); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + var _a, _b, _c; + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + 
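+ // The numeric run fields below are parsed from GitHub-provided env vars; parseInt yields NaN if one is unset.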
this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = + (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; + } + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); + } + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; + } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; + } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + } +} +exports.Context = Context; +//# sourceMappingURL=context.js.map + +/***/ }), + +/***/ 95438: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(74087)); +const utils_1 = __nccwpck_require__(73030); +exports.context = new Context.Context(); +/** + * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokit(token, options, ...additionalPlugins) { + const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); + return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); +} +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map + +/***/ }), + +/***/ 47914: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__nccwpck_require__(96255)); +const undici_1 = __nccwpck_require__(41773); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); + } + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); + } + return typeof options.auth === 'string' ? options.auth : `token ${token}`; +} +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); +} +exports.getProxyAgentDispatcher = getProxyAgentDispatcher; +function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }); + return proxyFetch; +} +exports.getProxyFetch = getProxyFetch; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +} +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 73030: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(74087)); +const Utils = __importStar(__nccwpck_require__(47914)); +// octokit + plugins +const core_1 = __nccwpck_require__(76762); +const plugin_rest_endpoint_methods_1 = __nccwpck_require__(83044); +const plugin_paginate_rest_1 = __nccwpck_require__(64193); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +exports.defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); +/** + * Convience function to correctly format Octokit Options to pass into the constructor. + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; +} +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 35526: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 96255: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(13685)); +const https = __importStar(__nccwpck_require__(95687)); +const pm = __importStar(__nccwpck_require__(19835)); +const tunnel = __importStar(__nccwpck_require__(74294)); +const undici_1 = __nccwpck_require__(41773); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + 
Headers["ContentType"] = "content-type"; +})(Headers || (exports.Headers = Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + 
options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, 
MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. 
+ * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. 
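+ // (a cached dispatcher exists only when keepAlive is enabled; otherwise a fresh undici ProxyAgent is constructed below)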
+ if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `${proxyUrl.username}:${proxyUrl.password}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
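+ // 4xx/5xx responses reject the promise with an HttpClientError whose err.result carries any parsed body.
+ // Illustrative sketch (not part of the bundle): how a caller typically reaches this path via getJson —
+ //   const http = new HttpClient('my-user-agent'); // hypothetical agent string
+ //   const res = await http.getJson('https://api.github.com/rate_limit');
+ //   console.log(res.statusCode, res.result); // 404 resolves with result === null; other 4xx/5xx throw HttpClientError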
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 19835: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + try { + return new URL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new URL(`http://${proxyVar}`); + } + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} +//# sourceMappingURL=proxy.js.map + +/***/ }), + +/***/ 40334: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + 
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; +} + +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} + +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 76762: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(45030); +var import_before_after_hook = __nccwpck_require__(83682); +var import_request = __nccwpck_require__(36234); +var import_graphql = __nccwpck_require__(88467); +var import_auth_token = __nccwpck_require__(40334); + +// pkg/dist-src/version.js +var VERSION = "5.0.2"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} 
${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; + } + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. 
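+ // (options.auth is merged in below, so strategy-specific settings reach the auth factory)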
+ octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); + } + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 59440: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(45030); + +// pkg/dist-src/version.js +var VERSION = "9.0.4"; + +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; + +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/util/merge-deep.js +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} + +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } + } + return obj; +} + +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? 
{ method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); + } + return mergedOptions; +} + +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; +} + +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); + } +} + +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); +} + +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); +} + +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 88467: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(36234); +var import_universal_user_agent = __nccwpck_require__(45030); + +// pkg/dist-src/version.js +var VERSION = "7.0.2"; + +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(36234); + +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(36234); + +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + 
Error.captureStackTrace(this, this.constructor); + } + } +}; + +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } + } + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} + +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 64193: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () 
=> paginateRest, + paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "9.1.5"; + +// pkg/dist-src/normalize-paginated-list-response.js +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} + +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + }) + }; +} + +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; + } + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); +} +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; + } + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat( + mapFn ? 
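+      // mapFn receives each page plus the done() callback defined above;
+      // calling done() sets earlyExit and halts pagination after this page.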
mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator2, mapFn); + }); +} + +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { + iterator +}); + +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET 
/orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET 
/repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET 
/users/{username}/starred", + "GET /users/{username}/subscriptions" +]; + +// pkg/dist-src/paginating-endpoints.js +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + +// pkg/dist-src/index.js +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 83044: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "10.2.0"; + +// pkg/dist-src/generated/endpoints.js +var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + 
deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET 
/orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: 
["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: 
[ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + 
listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET 
/orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET 
/repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + copilot: { + addCopilotForBusinessSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotForBusinessSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT 
/orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + 
], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + } + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE 
/user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + } + ], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + } + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + } + ] + }, + orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], + createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH 
/orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET /orgs/{org}/properties/schema/{custom_property_name}" + ], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: ["POST 
/orgs/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST 
/users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], + update: 
["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + 
addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + checkAutomatedSecurityFixes: [ + "GET /repos/{owner}/{repo}/automated-security-fixes" + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployment"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE 
/repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: 
["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET 
/repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + 
setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET 
/repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST 
/user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET 
/users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; +var endpoints_default = Endpoints; + +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); + }, + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; + }, + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); + } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); + } + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + 
decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } + } + return requestWithDefaults(options2); + } + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} + +// pkg/dist-src/index.js +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 10537: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(58932); +var import_once = __toESM(__nccwpck_require__(1223)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
+ ) + ); + return headers || {}; + } + }); + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 36234: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(59440); +var import_universal_user_agent = __nccwpck_require__(45030); + +// pkg/dist-src/version.js +var VERSION = "8.1.6"; + +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(10537); + +// pkg/dist-src/get-buffer-response.js +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +// pkg/dist-src/fetch-wrapper.js +function fetchWrapper(requestOptions) { + var _a, _b, _c; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. 
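+ // The conditional spread on the next line only sets `duplex: "half"` when a
+ // request body is present: Node's fetch (undici) rejects stream bodies that
+ // do not declare a duplex mode, while plain string bodies do not need one.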
+ ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + return data.message; + } + return `Unknown error: ${JSON.stringify(data)}`; +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; + return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); +} + +// pkg/dist-src/index.js +var 
request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } +}); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 20940: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['accessanalyzer'] = {}; +AWS.AccessAnalyzer = Service.defineService('accessanalyzer', ['2019-11-01']); +Object.defineProperty(apiLoader.services['accessanalyzer'], '2019-11-01', { + get: function get() { + var model = __nccwpck_require__(30590); + model.paginators = (__nccwpck_require__(63080)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AccessAnalyzer; + + +/***/ }), + +/***/ 32400: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['account'] = {}; +AWS.Account = Service.defineService('account', ['2021-02-01']); +Object.defineProperty(apiLoader.services['account'], '2021-02-01', { + get: function get() { + var model = __nccwpck_require__(36713); + model.paginators = (__nccwpck_require__(52324)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Account; + + +/***/ }), + +/***/ 30838: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['acm'] = {}; +AWS.ACM = Service.defineService('acm', ['2015-12-08']); +Object.defineProperty(apiLoader.services['acm'], '2015-12-08', { + get: function get() { + var model = __nccwpck_require__(34662); + model.paginators = (__nccwpck_require__(42680)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(85678)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ACM; + + +/***/ }), + +/***/ 18450: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['acmpca'] = {}; +AWS.ACMPCA = Service.defineService('acmpca', ['2017-08-22']); +Object.defineProperty(apiLoader.services['acmpca'], '2017-08-22', { + get: function get() { + var model = __nccwpck_require__(33004); + model.paginators = (__nccwpck_require__(21209)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(89217)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ACMPCA; + + +/***/ }), + +/***/ 14578: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['alexaforbusiness'] = {}; +AWS.AlexaForBusiness = Service.defineService('alexaforbusiness', ['2017-11-09']); +Object.defineProperty(apiLoader.services['alexaforbusiness'], '2017-11-09', { + get: function get() { + var model = __nccwpck_require__(69786); + model.paginators = (__nccwpck_require__(21009)/* .pagination */ .o); + return model; + }, + 
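// Lazy-loading pattern: the service's API model is only require()d on the
+ // first read of this versioned property, presumably so that bundles pulling
+ // in many AWS services do not pay the model-parsing cost up front.
+ 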
enumerable: true, + configurable: true +}); + +module.exports = AWS.AlexaForBusiness; + + +/***/ }), + +/***/ 26296: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +module.exports = { + ACM: __nccwpck_require__(30838), + APIGateway: __nccwpck_require__(91759), + ApplicationAutoScaling: __nccwpck_require__(25598), + AppStream: __nccwpck_require__(21730), + AutoScaling: __nccwpck_require__(31652), + Batch: __nccwpck_require__(10000), + Budgets: __nccwpck_require__(43923), + CloudDirectory: __nccwpck_require__(56231), + CloudFormation: __nccwpck_require__(74643), + CloudFront: __nccwpck_require__(48058), + CloudHSM: __nccwpck_require__(59976), + CloudSearch: __nccwpck_require__(72321), + CloudSearchDomain: __nccwpck_require__(64072), + CloudTrail: __nccwpck_require__(65512), + CloudWatch: __nccwpck_require__(6763), + CloudWatchEvents: __nccwpck_require__(38124), + CloudWatchLogs: __nccwpck_require__(96693), + CodeBuild: __nccwpck_require__(60450), + CodeCommit: __nccwpck_require__(71323), + CodeDeploy: __nccwpck_require__(54599), + CodePipeline: __nccwpck_require__(22938), + CognitoIdentity: __nccwpck_require__(58291), + CognitoIdentityServiceProvider: __nccwpck_require__(31379), + CognitoSync: __nccwpck_require__(74770), + ConfigService: __nccwpck_require__(34061), + CUR: __nccwpck_require__(5026), + DataPipeline: __nccwpck_require__(65688), + DeviceFarm: __nccwpck_require__(26272), + DirectConnect: __nccwpck_require__(73783), + DirectoryService: __nccwpck_require__(83908), + Discovery: __nccwpck_require__(81690), + DMS: __nccwpck_require__(69868), + DynamoDB: __nccwpck_require__(14347), + DynamoDBStreams: __nccwpck_require__(88090), + EC2: __nccwpck_require__(7778), + ECR: __nccwpck_require__(15211), + ECS: __nccwpck_require__(16615), + EFS: __nccwpck_require__(34375), + ElastiCache: __nccwpck_require__(81065), + ElasticBeanstalk: __nccwpck_require__(14897), + ELB: __nccwpck_require__(10907), + ELBv2: __nccwpck_require__(44311), + EMR: __nccwpck_require__(50470), + ES: __nccwpck_require__(84462), + ElasticTranscoder: __nccwpck_require__(40745), + Firehose: __nccwpck_require__(92831), + GameLift: __nccwpck_require__(8085), + Glacier: __nccwpck_require__(63249), + Health: __nccwpck_require__(21834), + IAM: __nccwpck_require__(50058), + ImportExport: __nccwpck_require__(6769), + Inspector: __nccwpck_require__(89439), + Iot: __nccwpck_require__(98392), + IotData: __nccwpck_require__(6564), + Kinesis: __nccwpck_require__(49876), + KinesisAnalytics: __nccwpck_require__(90042), + KMS: __nccwpck_require__(56782), + Lambda: __nccwpck_require__(13321), + LexRuntime: __nccwpck_require__(62716), + Lightsail: __nccwpck_require__(22718), + MachineLearning: __nccwpck_require__(82907), + MarketplaceCommerceAnalytics: __nccwpck_require__(4540), + MarketplaceMetering: __nccwpck_require__(39297), + MTurk: __nccwpck_require__(79954), + MobileAnalytics: __nccwpck_require__(66690), + OpsWorks: __nccwpck_require__(75691), + OpsWorksCM: __nccwpck_require__(80388), + Organizations: __nccwpck_require__(52560), + Pinpoint: __nccwpck_require__(18388), + Polly: __nccwpck_require__(97332), + RDS: __nccwpck_require__(71578), + Redshift: __nccwpck_require__(84853), + Rekognition: __nccwpck_require__(65470), + ResourceGroupsTaggingAPI: __nccwpck_require__(7385), + Route53: __nccwpck_require__(44968), + Route53Domains: __nccwpck_require__(51994), + S3: __nccwpck_require__(83256), + S3Control: __nccwpck_require__(99817), + ServiceCatalog: __nccwpck_require__(822), + SES: 
__nccwpck_require__(46816), + Shield: __nccwpck_require__(20271), + SimpleDB: __nccwpck_require__(10120), + SMS: __nccwpck_require__(57719), + Snowball: __nccwpck_require__(510), + SNS: __nccwpck_require__(28581), + SQS: __nccwpck_require__(63172), + SSM: __nccwpck_require__(83380), + StorageGateway: __nccwpck_require__(89190), + StepFunctions: __nccwpck_require__(8136), + STS: __nccwpck_require__(57513), + Support: __nccwpck_require__(1099), + SWF: __nccwpck_require__(32327), + XRay: __nccwpck_require__(41548), + WAF: __nccwpck_require__(72742), + WAFRegional: __nccwpck_require__(23153), + WorkDocs: __nccwpck_require__(38835), + WorkSpaces: __nccwpck_require__(25513), + CodeStar: __nccwpck_require__(98336), + LexModelBuildingService: __nccwpck_require__(37397), + MarketplaceEntitlementService: __nccwpck_require__(53707), + Athena: __nccwpck_require__(29434), + Greengrass: __nccwpck_require__(20690), + DAX: __nccwpck_require__(71398), + MigrationHub: __nccwpck_require__(14688), + CloudHSMV2: __nccwpck_require__(70889), + Glue: __nccwpck_require__(31658), + Mobile: __nccwpck_require__(39782), + Pricing: __nccwpck_require__(92765), + CostExplorer: __nccwpck_require__(79523), + MediaConvert: __nccwpck_require__(57220), + MediaLive: __nccwpck_require__(7509), + MediaPackage: __nccwpck_require__(91620), + MediaStore: __nccwpck_require__(83748), + MediaStoreData: __nccwpck_require__(98703), + AppSync: __nccwpck_require__(12402), + GuardDuty: __nccwpck_require__(40755), + MQ: __nccwpck_require__(23093), + Comprehend: __nccwpck_require__(62878), + IoTJobsDataPlane: __nccwpck_require__(42332), + KinesisVideoArchivedMedia: __nccwpck_require__(5580), + KinesisVideoMedia: __nccwpck_require__(81308), + KinesisVideo: __nccwpck_require__(89927), + SageMakerRuntime: __nccwpck_require__(85044), + SageMaker: __nccwpck_require__(77657), + Translate: __nccwpck_require__(72544), + ResourceGroups: __nccwpck_require__(58756), + AlexaForBusiness: __nccwpck_require__(14578), + Cloud9: __nccwpck_require__(85473), + ServerlessApplicationRepository: __nccwpck_require__(62402), + ServiceDiscovery: __nccwpck_require__(91569), + WorkMail: __nccwpck_require__(38374), + AutoScalingPlans: __nccwpck_require__(2554), + TranscribeService: __nccwpck_require__(75811), + Connect: __nccwpck_require__(13879), + ACMPCA: __nccwpck_require__(18450), + FMS: __nccwpck_require__(11316), + SecretsManager: __nccwpck_require__(85131), + IoTAnalytics: __nccwpck_require__(67409), + IoT1ClickDevicesService: __nccwpck_require__(39474), + IoT1ClickProjects: __nccwpck_require__(4686), + PI: __nccwpck_require__(15505), + Neptune: __nccwpck_require__(30047), + MediaTailor: __nccwpck_require__(99658), + EKS: __nccwpck_require__(23337), + DLM: __nccwpck_require__(24958), + Signer: __nccwpck_require__(71596), + Chime: __nccwpck_require__(84646), + PinpointEmail: __nccwpck_require__(83060), + RAM: __nccwpck_require__(94394), + Route53Resolver: __nccwpck_require__(25894), + PinpointSMSVoice: __nccwpck_require__(46605), + QuickSight: __nccwpck_require__(29898), + RDSDataService: __nccwpck_require__(30147), + Amplify: __nccwpck_require__(38090), + DataSync: __nccwpck_require__(25308), + RoboMaker: __nccwpck_require__(18068), + Transfer: __nccwpck_require__(51585), + GlobalAccelerator: __nccwpck_require__(19306), + ComprehendMedical: __nccwpck_require__(32349), + KinesisAnalyticsV2: __nccwpck_require__(74631), + MediaConnect: __nccwpck_require__(67639), + FSx: __nccwpck_require__(60642), + SecurityHub: __nccwpck_require__(21550), + AppMesh: 
__nccwpck_require__(69226), + LicenseManager: __nccwpck_require__(34693), + Kafka: __nccwpck_require__(56775), + ApiGatewayManagementApi: __nccwpck_require__(31762), + ApiGatewayV2: __nccwpck_require__(44987), + DocDB: __nccwpck_require__(55129), + Backup: __nccwpck_require__(82455), + WorkLink: __nccwpck_require__(48579), + Textract: __nccwpck_require__(58523), + ManagedBlockchain: __nccwpck_require__(85143), + MediaPackageVod: __nccwpck_require__(14962), + GroundStation: __nccwpck_require__(80494), + IoTThingsGraph: __nccwpck_require__(58905), + IoTEvents: __nccwpck_require__(88065), + IoTEventsData: __nccwpck_require__(56973), + Personalize: __nccwpck_require__(33696), + PersonalizeEvents: __nccwpck_require__(88170), + PersonalizeRuntime: __nccwpck_require__(66184), + ApplicationInsights: __nccwpck_require__(83972), + ServiceQuotas: __nccwpck_require__(57800), + EC2InstanceConnect: __nccwpck_require__(92209), + EventBridge: __nccwpck_require__(898), + LakeFormation: __nccwpck_require__(6726), + ForecastService: __nccwpck_require__(12942), + ForecastQueryService: __nccwpck_require__(36822), + QLDB: __nccwpck_require__(71266), + QLDBSession: __nccwpck_require__(55423), + WorkMailMessageFlow: __nccwpck_require__(67025), + CodeStarNotifications: __nccwpck_require__(15141), + SavingsPlans: __nccwpck_require__(62825), + SSO: __nccwpck_require__(71096), + SSOOIDC: __nccwpck_require__(49870), + MarketplaceCatalog: __nccwpck_require__(2609), + DataExchange: __nccwpck_require__(11024), + SESV2: __nccwpck_require__(20142), + MigrationHubConfig: __nccwpck_require__(62658), + ConnectParticipant: __nccwpck_require__(94198), + AppConfig: __nccwpck_require__(78606), + IoTSecureTunneling: __nccwpck_require__(98562), + WAFV2: __nccwpck_require__(50353), + ElasticInference: __nccwpck_require__(37708), + Imagebuilder: __nccwpck_require__(57511), + Schemas: __nccwpck_require__(55713), + AccessAnalyzer: __nccwpck_require__(20940), + CodeGuruReviewer: __nccwpck_require__(60070), + CodeGuruProfiler: __nccwpck_require__(65704), + ComputeOptimizer: __nccwpck_require__(64459), + FraudDetector: __nccwpck_require__(99830), + Kendra: __nccwpck_require__(66122), + NetworkManager: __nccwpck_require__(37610), + Outposts: __nccwpck_require__(27551), + AugmentedAIRuntime: __nccwpck_require__(33960), + EBS: __nccwpck_require__(62837), + KinesisVideoSignalingChannels: __nccwpck_require__(12710), + Detective: __nccwpck_require__(60674), + CodeStarconnections: __nccwpck_require__(78270), + Synthetics: __nccwpck_require__(25910), + IoTSiteWise: __nccwpck_require__(89690), + Macie2: __nccwpck_require__(57330), + CodeArtifact: __nccwpck_require__(91983), + Honeycode: __nccwpck_require__(38889), + IVS: __nccwpck_require__(67701), + Braket: __nccwpck_require__(35429), + IdentityStore: __nccwpck_require__(60222), + Appflow: __nccwpck_require__(60844), + RedshiftData: __nccwpck_require__(203), + SSOAdmin: __nccwpck_require__(66644), + TimestreamQuery: __nccwpck_require__(24529), + TimestreamWrite: __nccwpck_require__(1573), + S3Outposts: __nccwpck_require__(90493), + DataBrew: __nccwpck_require__(35846), + ServiceCatalogAppRegistry: __nccwpck_require__(79068), + NetworkFirewall: __nccwpck_require__(84626), + MWAA: __nccwpck_require__(32712), + AmplifyBackend: __nccwpck_require__(2806), + AppIntegrations: __nccwpck_require__(85479), + ConnectContactLens: __nccwpck_require__(41847), + DevOpsGuru: __nccwpck_require__(90673), + ECRPUBLIC: __nccwpck_require__(90244), + LookoutVision: __nccwpck_require__(65046), + 
SageMakerFeatureStoreRuntime: __nccwpck_require__(67644), + CustomerProfiles: __nccwpck_require__(28379), + AuditManager: __nccwpck_require__(20472), + EMRcontainers: __nccwpck_require__(49984), + HealthLake: __nccwpck_require__(64254), + SagemakerEdge: __nccwpck_require__(38966), + Amp: __nccwpck_require__(96881), + GreengrassV2: __nccwpck_require__(45126), + IotDeviceAdvisor: __nccwpck_require__(97569), + IoTFleetHub: __nccwpck_require__(42513), + IoTWireless: __nccwpck_require__(8226), + Location: __nccwpck_require__(44594), + WellArchitected: __nccwpck_require__(86263), + LexModelsV2: __nccwpck_require__(27254), + LexRuntimeV2: __nccwpck_require__(33855), + Fis: __nccwpck_require__(73003), + LookoutMetrics: __nccwpck_require__(78708), + Mgn: __nccwpck_require__(41339), + LookoutEquipment: __nccwpck_require__(21843), + Nimble: __nccwpck_require__(89428), + Finspace: __nccwpck_require__(3052), + Finspacedata: __nccwpck_require__(96869), + SSMContacts: __nccwpck_require__(12577), + SSMIncidents: __nccwpck_require__(20590), + ApplicationCostProfiler: __nccwpck_require__(20887), + AppRunner: __nccwpck_require__(75589), + Proton: __nccwpck_require__(9275), + Route53RecoveryCluster: __nccwpck_require__(35738), + Route53RecoveryControlConfig: __nccwpck_require__(16063), + Route53RecoveryReadiness: __nccwpck_require__(79106), + ChimeSDKIdentity: __nccwpck_require__(55975), + ChimeSDKMessaging: __nccwpck_require__(25255), + SnowDeviceManagement: __nccwpck_require__(64655), + MemoryDB: __nccwpck_require__(50782), + OpenSearch: __nccwpck_require__(60358), + KafkaConnect: __nccwpck_require__(61879), + VoiceID: __nccwpck_require__(28747), + Wisdom: __nccwpck_require__(85266), + Account: __nccwpck_require__(32400), + CloudControl: __nccwpck_require__(25630), + Grafana: __nccwpck_require__(51050), + Panorama: __nccwpck_require__(20368), + ChimeSDKMeetings: __nccwpck_require__(80788), + Resiliencehub: __nccwpck_require__(21173), + MigrationHubStrategy: __nccwpck_require__(96533), + AppConfigData: __nccwpck_require__(45282), + Drs: __nccwpck_require__(41116), + MigrationHubRefactorSpaces: __nccwpck_require__(2925), + Evidently: __nccwpck_require__(21440), + Inspector2: __nccwpck_require__(98650), + Rbin: __nccwpck_require__(70145), + RUM: __nccwpck_require__(53237), + BackupGateway: __nccwpck_require__(68277), + IoTTwinMaker: __nccwpck_require__(65010), + WorkSpacesWeb: __nccwpck_require__(94124), + AmplifyUIBuilder: __nccwpck_require__(89937), + Keyspaces: __nccwpck_require__(24789), + Billingconductor: __nccwpck_require__(38416), + PinpointSMSVoiceV2: __nccwpck_require__(478), + Ivschat: __nccwpck_require__(17077), + ChimeSDKMediaPipelines: __nccwpck_require__(18423), + EMRServerless: __nccwpck_require__(219), + M2: __nccwpck_require__(22482), + ConnectCampaigns: __nccwpck_require__(42789), + RedshiftServerless: __nccwpck_require__(29987), + RolesAnywhere: __nccwpck_require__(83604), + LicenseManagerUserSubscriptions: __nccwpck_require__(37725), + BackupStorage: __nccwpck_require__(82304), + PrivateNetworks: __nccwpck_require__(63088), + SupportApp: __nccwpck_require__(51288), + ControlTower: __nccwpck_require__(77574), + IoTFleetWise: __nccwpck_require__(94329), + MigrationHubOrchestrator: __nccwpck_require__(66120), + ConnectCases: __nccwpck_require__(72223), + ResourceExplorer2: __nccwpck_require__(74071), + Scheduler: __nccwpck_require__(94840), + ChimeSDKVoice: __nccwpck_require__(349), + IoTRoboRunner: __nccwpck_require__(22163), + SsmSap: __nccwpck_require__(44552), + OAM: 
__nccwpck_require__(9319), + ARCZonalShift: __nccwpck_require__(54280), + Omics: __nccwpck_require__(75114), + OpenSearchServerless: __nccwpck_require__(86277), + SecurityLake: __nccwpck_require__(84296), + SimSpaceWeaver: __nccwpck_require__(37090), + DocDBElastic: __nccwpck_require__(20792), + SageMakerGeospatial: __nccwpck_require__(4707), + CodeCatalyst: __nccwpck_require__(19499), + Pipes: __nccwpck_require__(14220), + SageMakerMetrics: __nccwpck_require__(28199), + KinesisVideoWebRTCStorage: __nccwpck_require__(52642), + LicenseManagerLinuxSubscriptions: __nccwpck_require__(52687), + KendraRanking: __nccwpck_require__(46255), + CleanRooms: __nccwpck_require__(15130), + CloudTrailData: __nccwpck_require__(31191), + Tnb: __nccwpck_require__(15300), + InternetMonitor: __nccwpck_require__(84099), + IVSRealTime: __nccwpck_require__(51946), + VPCLattice: __nccwpck_require__(78952), + OSIS: __nccwpck_require__(98021), + MediaPackageV2: __nccwpck_require__(53264), + PaymentCryptography: __nccwpck_require__(11594), + PaymentCryptographyData: __nccwpck_require__(96559), + CodeGuruSecurity: __nccwpck_require__(32620), + VerifiedPermissions: __nccwpck_require__(35604), + AppFabric: __nccwpck_require__(46318), + MedicalImaging: __nccwpck_require__(79712), + EntityResolution: __nccwpck_require__(22697), + ManagedBlockchainQuery: __nccwpck_require__(51046), + Neptunedata: __nccwpck_require__(25737), + PcaConnectorAd: __nccwpck_require__(55959), + Bedrock: __nccwpck_require__(88808), + BedrockRuntime: __nccwpck_require__(99177), + DataZone: __nccwpck_require__(31763), + LaunchWizard: __nccwpck_require__(71060), + TrustedAdvisor: __nccwpck_require__(4992), + CloudFrontKeyValueStore: __nccwpck_require__(47859), + InspectorScan: __nccwpck_require__(25467), + BCMDataExports: __nccwpck_require__(56703), + CostOptimizationHub: __nccwpck_require__(55443), + EKSAuth: __nccwpck_require__(66164), + FreeTier: __nccwpck_require__(11681), + Repostspace: __nccwpck_require__(21154), + WorkSpacesThinClient: __nccwpck_require__(22033), + B2bi: __nccwpck_require__(91065), + BedrockAgent: __nccwpck_require__(9025), + BedrockAgentRuntime: __nccwpck_require__(16946), + QBusiness: __nccwpck_require__(26842), + QConnect: __nccwpck_require__(39094), + CleanRoomsML: __nccwpck_require__(47594), + MarketplaceAgreement: __nccwpck_require__(50379), + MarketplaceDeployment: __nccwpck_require__(56811), + NeptuneGraph: __nccwpck_require__(77598), + NetworkMonitor: __nccwpck_require__(77614), + SupplyChain: __nccwpck_require__(39674) +}; + +/***/ }), + +/***/ 96881: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['amp'] = {}; +AWS.Amp = Service.defineService('amp', ['2020-08-01']); +Object.defineProperty(apiLoader.services['amp'], '2020-08-01', { + get: function get() { + var model = __nccwpck_require__(78362); + model.paginators = (__nccwpck_require__(75928)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(58239)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Amp; + + +/***/ }), + +/***/ 38090: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['amplify'] = {}; +AWS.Amplify = Service.defineService('amplify', 
['2017-07-25']); +Object.defineProperty(apiLoader.services['amplify'], '2017-07-25', { + get: function get() { + var model = __nccwpck_require__(36813); + model.paginators = (__nccwpck_require__(53733)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Amplify; + + +/***/ }), + +/***/ 2806: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['amplifybackend'] = {}; +AWS.AmplifyBackend = Service.defineService('amplifybackend', ['2020-08-11']); +Object.defineProperty(apiLoader.services['amplifybackend'], '2020-08-11', { + get: function get() { + var model = __nccwpck_require__(23939); + model.paginators = (__nccwpck_require__(27232)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AmplifyBackend; + + +/***/ }), + +/***/ 89937: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['amplifyuibuilder'] = {}; +AWS.AmplifyUIBuilder = Service.defineService('amplifyuibuilder', ['2021-08-11']); +Object.defineProperty(apiLoader.services['amplifyuibuilder'], '2021-08-11', { + get: function get() { + var model = __nccwpck_require__(48987); + model.paginators = (__nccwpck_require__(56072)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(70564)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AmplifyUIBuilder; + + +/***/ }), + +/***/ 91759: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['apigateway'] = {}; +AWS.APIGateway = Service.defineService('apigateway', ['2015-07-09']); +__nccwpck_require__(4338); +Object.defineProperty(apiLoader.services['apigateway'], '2015-07-09', { + get: function get() { + var model = __nccwpck_require__(59463); + model.paginators = (__nccwpck_require__(25878)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.APIGateway; + + +/***/ }), + +/***/ 31762: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['apigatewaymanagementapi'] = {}; +AWS.ApiGatewayManagementApi = Service.defineService('apigatewaymanagementapi', ['2018-11-29']); +Object.defineProperty(apiLoader.services['apigatewaymanagementapi'], '2018-11-29', { + get: function get() { + var model = __nccwpck_require__(57832); + model.paginators = (__nccwpck_require__(2787)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ApiGatewayManagementApi; + + +/***/ }), + +/***/ 44987: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['apigatewayv2'] = {}; +AWS.ApiGatewayV2 = Service.defineService('apigatewayv2', ['2018-11-29']); 
+Object.defineProperty(apiLoader.services['apigatewayv2'], '2018-11-29', { + get: function get() { + var model = __nccwpck_require__(59326); + model.paginators = (__nccwpck_require__(90171)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ApiGatewayV2; + + +/***/ }), + +/***/ 78606: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['appconfig'] = {}; +AWS.AppConfig = Service.defineService('appconfig', ['2019-10-09']); +Object.defineProperty(apiLoader.services['appconfig'], '2019-10-09', { + get: function get() { + var model = __nccwpck_require__(44701); + model.paginators = (__nccwpck_require__(41789)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AppConfig; + + +/***/ }), + +/***/ 45282: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['appconfigdata'] = {}; +AWS.AppConfigData = Service.defineService('appconfigdata', ['2021-11-11']); +Object.defineProperty(apiLoader.services['appconfigdata'], '2021-11-11', { + get: function get() { + var model = __nccwpck_require__(86796); + model.paginators = (__nccwpck_require__(48010)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AppConfigData; + + +/***/ }), + +/***/ 46318: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['appfabric'] = {}; +AWS.AppFabric = Service.defineService('appfabric', ['2023-05-19']); +Object.defineProperty(apiLoader.services['appfabric'], '2023-05-19', { + get: function get() { + var model = __nccwpck_require__(78267); + model.paginators = (__nccwpck_require__(42193)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(44821)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AppFabric; + + +/***/ }), + +/***/ 60844: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['appflow'] = {}; +AWS.Appflow = Service.defineService('appflow', ['2020-08-23']); +Object.defineProperty(apiLoader.services['appflow'], '2020-08-23', { + get: function get() { + var model = __nccwpck_require__(32840); + model.paginators = (__nccwpck_require__(16916)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Appflow; + + +/***/ }), + +/***/ 85479: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['appintegrations'] = {}; +AWS.AppIntegrations = Service.defineService('appintegrations', ['2020-07-29']); +Object.defineProperty(apiLoader.services['appintegrations'], '2020-07-29', { + get: function get() { + var model = __nccwpck_require__(62033); + model.paginators = (__nccwpck_require__(61866)/* 
.pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AppIntegrations; + + +/***/ }), + +/***/ 25598: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['applicationautoscaling'] = {}; +AWS.ApplicationAutoScaling = Service.defineService('applicationautoscaling', ['2016-02-06']); +Object.defineProperty(apiLoader.services['applicationautoscaling'], '2016-02-06', { + get: function get() { + var model = __nccwpck_require__(47320); + model.paginators = (__nccwpck_require__(40322)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ApplicationAutoScaling; + + +/***/ }), + +/***/ 20887: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['applicationcostprofiler'] = {}; +AWS.ApplicationCostProfiler = Service.defineService('applicationcostprofiler', ['2020-09-10']); +Object.defineProperty(apiLoader.services['applicationcostprofiler'], '2020-09-10', { + get: function get() { + var model = __nccwpck_require__(96818); + model.paginators = (__nccwpck_require__(41331)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ApplicationCostProfiler; + + +/***/ }), + +/***/ 83972: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['applicationinsights'] = {}; +AWS.ApplicationInsights = Service.defineService('applicationinsights', ['2018-11-25']); +Object.defineProperty(apiLoader.services['applicationinsights'], '2018-11-25', { + get: function get() { + var model = __nccwpck_require__(96143); + model.paginators = (__nccwpck_require__(22242)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ApplicationInsights; + + +/***/ }), + +/***/ 69226: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['appmesh'] = {}; +AWS.AppMesh = Service.defineService('appmesh', ['2018-10-01', '2018-10-01*', '2019-01-25']); +Object.defineProperty(apiLoader.services['appmesh'], '2018-10-01', { + get: function get() { + var model = __nccwpck_require__(64780); + model.paginators = (__nccwpck_require__(54936)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['appmesh'], '2019-01-25', { + get: function get() { + var model = __nccwpck_require__(78066); + model.paginators = (__nccwpck_require__(37698)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AppMesh; + + +/***/ }), + +/***/ 75589: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['apprunner'] = {}; +AWS.AppRunner = Service.defineService('apprunner', ['2020-05-15']); 
+Object.defineProperty(apiLoader.services['apprunner'], '2020-05-15', { + get: function get() { + var model = __nccwpck_require__(30036); + model.paginators = (__nccwpck_require__(50293)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AppRunner; + + +/***/ }), + +/***/ 21730: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['appstream'] = {}; +AWS.AppStream = Service.defineService('appstream', ['2016-12-01']); +Object.defineProperty(apiLoader.services['appstream'], '2016-12-01', { + get: function get() { + var model = __nccwpck_require__(85538); + model.paginators = (__nccwpck_require__(32191)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(21134)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AppStream; + + +/***/ }), + +/***/ 12402: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['appsync'] = {}; +AWS.AppSync = Service.defineService('appsync', ['2017-07-25']); +Object.defineProperty(apiLoader.services['appsync'], '2017-07-25', { + get: function get() { + var model = __nccwpck_require__(94937); + model.paginators = (__nccwpck_require__(50233)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AppSync; + + +/***/ }), + +/***/ 54280: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['arczonalshift'] = {}; +AWS.ARCZonalShift = Service.defineService('arczonalshift', ['2022-10-30']); +Object.defineProperty(apiLoader.services['arczonalshift'], '2022-10-30', { + get: function get() { + var model = __nccwpck_require__(52286); + model.paginators = (__nccwpck_require__(70002)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ARCZonalShift; + + +/***/ }), + +/***/ 29434: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['athena'] = {}; +AWS.Athena = Service.defineService('athena', ['2017-05-18']); +Object.defineProperty(apiLoader.services['athena'], '2017-05-18', { + get: function get() { + var model = __nccwpck_require__(28680); + model.paginators = (__nccwpck_require__(44417)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Athena; + + +/***/ }), + +/***/ 20472: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['auditmanager'] = {}; +AWS.AuditManager = Service.defineService('auditmanager', ['2017-07-25']); +Object.defineProperty(apiLoader.services['auditmanager'], '2017-07-25', { + get: function get() { + var model = __nccwpck_require__(41672); + model.paginators = (__nccwpck_require__(41321)/* .pagination */ .o); + return model; + 
}, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AuditManager; + + +/***/ }), + +/***/ 33960: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['augmentedairuntime'] = {}; +AWS.AugmentedAIRuntime = Service.defineService('augmentedairuntime', ['2019-11-07']); +Object.defineProperty(apiLoader.services['augmentedairuntime'], '2019-11-07', { + get: function get() { + var model = __nccwpck_require__(57704); + model.paginators = (__nccwpck_require__(13201)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AugmentedAIRuntime; + + +/***/ }), + +/***/ 31652: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['autoscaling'] = {}; +AWS.AutoScaling = Service.defineService('autoscaling', ['2011-01-01']); +Object.defineProperty(apiLoader.services['autoscaling'], '2011-01-01', { + get: function get() { + var model = __nccwpck_require__(55394); + model.paginators = (__nccwpck_require__(81436)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AutoScaling; + + +/***/ }), + +/***/ 2554: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['autoscalingplans'] = {}; +AWS.AutoScalingPlans = Service.defineService('autoscalingplans', ['2018-01-06']); +Object.defineProperty(apiLoader.services['autoscalingplans'], '2018-01-06', { + get: function get() { + var model = __nccwpck_require__(53216); + model.paginators = (__nccwpck_require__(64985)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.AutoScalingPlans; + + +/***/ }), + +/***/ 91065: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['b2bi'] = {}; +AWS.B2bi = Service.defineService('b2bi', ['2022-06-23']); +Object.defineProperty(apiLoader.services['b2bi'], '2022-06-23', { + get: function get() { + var model = __nccwpck_require__(93012); + model.paginators = (__nccwpck_require__(13608)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.B2bi; + + +/***/ }), + +/***/ 82455: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['backup'] = {}; +AWS.Backup = Service.defineService('backup', ['2018-11-15']); +Object.defineProperty(apiLoader.services['backup'], '2018-11-15', { + get: function get() { + var model = __nccwpck_require__(77990); + model.paginators = (__nccwpck_require__(54869)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Backup; + + +/***/ }), + +/***/ 68277: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = 
__nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['backupgateway'] = {}; +AWS.BackupGateway = Service.defineService('backupgateway', ['2021-01-01']); +Object.defineProperty(apiLoader.services['backupgateway'], '2021-01-01', { + get: function get() { + var model = __nccwpck_require__(96863); + model.paginators = (__nccwpck_require__(34946)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.BackupGateway; + + +/***/ }), + +/***/ 82304: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['backupstorage'] = {}; +AWS.BackupStorage = Service.defineService('backupstorage', ['2018-04-10']); +Object.defineProperty(apiLoader.services['backupstorage'], '2018-04-10', { + get: function get() { + var model = __nccwpck_require__(97436); + model.paginators = (__nccwpck_require__(73644)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.BackupStorage; + + +/***/ }), + +/***/ 10000: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['batch'] = {}; +AWS.Batch = Service.defineService('batch', ['2016-08-10']); +Object.defineProperty(apiLoader.services['batch'], '2016-08-10', { + get: function get() { + var model = __nccwpck_require__(12617); + model.paginators = (__nccwpck_require__(36988)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Batch; + + +/***/ }), + +/***/ 56703: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['bcmdataexports'] = {}; +AWS.BCMDataExports = Service.defineService('bcmdataexports', ['2023-11-26']); +Object.defineProperty(apiLoader.services['bcmdataexports'], '2023-11-26', { + get: function get() { + var model = __nccwpck_require__(81311); + model.paginators = (__nccwpck_require__(56228)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.BCMDataExports; + + +/***/ }), + +/***/ 88808: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['bedrock'] = {}; +AWS.Bedrock = Service.defineService('bedrock', ['2023-04-20']); +Object.defineProperty(apiLoader.services['bedrock'], '2023-04-20', { + get: function get() { + var model = __nccwpck_require__(30615); + model.paginators = (__nccwpck_require__(27869)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(81806)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Bedrock; + + +/***/ }), + +/***/ 9025: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['bedrockagent'] = {}; +AWS.BedrockAgent = Service.defineService('bedrockagent', 
['2023-06-05']); +Object.defineProperty(apiLoader.services['bedrockagent'], '2023-06-05', { + get: function get() { + var model = __nccwpck_require__(30755); + model.paginators = (__nccwpck_require__(7041)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.BedrockAgent; + + +/***/ }), + +/***/ 16946: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['bedrockagentruntime'] = {}; +AWS.BedrockAgentRuntime = Service.defineService('bedrockagentruntime', ['2023-07-26']); +Object.defineProperty(apiLoader.services['bedrockagentruntime'], '2023-07-26', { + get: function get() { + var model = __nccwpck_require__(65069); + model.paginators = (__nccwpck_require__(79273)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.BedrockAgentRuntime; + + +/***/ }), + +/***/ 99177: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['bedrockruntime'] = {}; +AWS.BedrockRuntime = Service.defineService('bedrockruntime', ['2023-09-30']); +Object.defineProperty(apiLoader.services['bedrockruntime'], '2023-09-30', { + get: function get() { + var model = __nccwpck_require__(38801); + model.paginators = (__nccwpck_require__(89474)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(34859)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.BedrockRuntime; + + +/***/ }), + +/***/ 38416: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['billingconductor'] = {}; +AWS.Billingconductor = Service.defineService('billingconductor', ['2021-07-30']); +Object.defineProperty(apiLoader.services['billingconductor'], '2021-07-30', { + get: function get() { + var model = __nccwpck_require__(54862); + model.paginators = (__nccwpck_require__(97894)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(64224)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Billingconductor; + + +/***/ }), + +/***/ 35429: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['braket'] = {}; +AWS.Braket = Service.defineService('braket', ['2019-09-01']); +Object.defineProperty(apiLoader.services['braket'], '2019-09-01', { + get: function get() { + var model = __nccwpck_require__(23332); + model.paginators = (__nccwpck_require__(15732)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Braket; + + +/***/ }), + +/***/ 43923: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['budgets'] = {}; +AWS.Budgets = Service.defineService('budgets', ['2016-10-20']); +Object.defineProperty(apiLoader.services['budgets'], 
'2016-10-20', { + get: function get() { + var model = __nccwpck_require__(11978); + model.paginators = (__nccwpck_require__(23694)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Budgets; + + +/***/ }), + +/***/ 84646: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['chime'] = {}; +AWS.Chime = Service.defineService('chime', ['2018-05-01']); +Object.defineProperty(apiLoader.services['chime'], '2018-05-01', { + get: function get() { + var model = __nccwpck_require__(44811); + model.paginators = (__nccwpck_require__(31890)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Chime; + + +/***/ }), + +/***/ 55975: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['chimesdkidentity'] = {}; +AWS.ChimeSDKIdentity = Service.defineService('chimesdkidentity', ['2021-04-20']); +Object.defineProperty(apiLoader.services['chimesdkidentity'], '2021-04-20', { + get: function get() { + var model = __nccwpck_require__(97402); + model.paginators = (__nccwpck_require__(133)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ChimeSDKIdentity; + + +/***/ }), + +/***/ 18423: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['chimesdkmediapipelines'] = {}; +AWS.ChimeSDKMediaPipelines = Service.defineService('chimesdkmediapipelines', ['2021-07-15']); +Object.defineProperty(apiLoader.services['chimesdkmediapipelines'], '2021-07-15', { + get: function get() { + var model = __nccwpck_require__(14679); + model.paginators = (__nccwpck_require__(82201)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ChimeSDKMediaPipelines; + + +/***/ }), + +/***/ 80788: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['chimesdkmeetings'] = {}; +AWS.ChimeSDKMeetings = Service.defineService('chimesdkmeetings', ['2021-07-15']); +Object.defineProperty(apiLoader.services['chimesdkmeetings'], '2021-07-15', { + get: function get() { + var model = __nccwpck_require__(17090); + model.paginators = (__nccwpck_require__(70582)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ChimeSDKMeetings; + + +/***/ }), + +/***/ 25255: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['chimesdkmessaging'] = {}; +AWS.ChimeSDKMessaging = Service.defineService('chimesdkmessaging', ['2021-05-15']); +Object.defineProperty(apiLoader.services['chimesdkmessaging'], '2021-05-15', { + get: function get() { + var model = __nccwpck_require__(52239); + model.paginators = (__nccwpck_require__(60807)/* .pagination */ .o); + 
return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ChimeSDKMessaging; + + +/***/ }), + +/***/ 349: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['chimesdkvoice'] = {}; +AWS.ChimeSDKVoice = Service.defineService('chimesdkvoice', ['2022-08-03']); +Object.defineProperty(apiLoader.services['chimesdkvoice'], '2022-08-03', { + get: function get() { + var model = __nccwpck_require__(26420); + model.paginators = (__nccwpck_require__(7986)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ChimeSDKVoice; + + +/***/ }), + +/***/ 15130: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cleanrooms'] = {}; +AWS.CleanRooms = Service.defineService('cleanrooms', ['2022-02-17']); +Object.defineProperty(apiLoader.services['cleanrooms'], '2022-02-17', { + get: function get() { + var model = __nccwpck_require__(11585); + model.paginators = (__nccwpck_require__(73060)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(29284)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CleanRooms; + + +/***/ }), + +/***/ 47594: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cleanroomsml'] = {}; +AWS.CleanRoomsML = Service.defineService('cleanroomsml', ['2023-09-06']); +Object.defineProperty(apiLoader.services['cleanroomsml'], '2023-09-06', { + get: function get() { + var model = __nccwpck_require__(1867); + model.paginators = (__nccwpck_require__(89767)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CleanRoomsML; + + +/***/ }), + +/***/ 85473: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloud9'] = {}; +AWS.Cloud9 = Service.defineService('cloud9', ['2017-09-23']); +Object.defineProperty(apiLoader.services['cloud9'], '2017-09-23', { + get: function get() { + var model = __nccwpck_require__(82981); + model.paginators = (__nccwpck_require__(9313)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Cloud9; + + +/***/ }), + +/***/ 25630: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudcontrol'] = {}; +AWS.CloudControl = Service.defineService('cloudcontrol', ['2021-09-30']); +Object.defineProperty(apiLoader.services['cloudcontrol'], '2021-09-30', { + get: function get() { + var model = __nccwpck_require__(24689); + model.paginators = (__nccwpck_require__(16041)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(31933)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudControl; + + +/***/ }), + +/***/ 
56231: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['clouddirectory'] = {}; +AWS.CloudDirectory = Service.defineService('clouddirectory', ['2016-05-10', '2016-05-10*', '2017-01-11']); +Object.defineProperty(apiLoader.services['clouddirectory'], '2016-05-10', { + get: function get() { + var model = __nccwpck_require__(72862); + model.paginators = (__nccwpck_require__(87597)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['clouddirectory'], '2017-01-11', { + get: function get() { + var model = __nccwpck_require__(88729); + model.paginators = (__nccwpck_require__(10156)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudDirectory; + + +/***/ }), + +/***/ 74643: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudformation'] = {}; +AWS.CloudFormation = Service.defineService('cloudformation', ['2010-05-15']); +Object.defineProperty(apiLoader.services['cloudformation'], '2010-05-15', { + get: function get() { + var model = __nccwpck_require__(31930); + model.paginators = (__nccwpck_require__(10611)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(53732)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudFormation; + + +/***/ }), + +/***/ 48058: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudfront'] = {}; +AWS.CloudFront = Service.defineService('cloudfront', ['2013-05-12*', '2013-11-11*', '2014-05-31*', '2014-10-21*', '2014-11-06*', '2015-04-17*', '2015-07-27*', '2015-09-17*', '2016-01-13*', '2016-01-28*', '2016-08-01*', '2016-08-20*', '2016-09-07*', '2016-09-29*', '2016-11-25', '2016-11-25*', '2017-03-25', '2017-03-25*', '2017-10-30', '2017-10-30*', '2018-06-18', '2018-06-18*', '2018-11-05', '2018-11-05*', '2019-03-26', '2019-03-26*', '2020-05-31']); +__nccwpck_require__(95483); +Object.defineProperty(apiLoader.services['cloudfront'], '2016-11-25', { + get: function get() { + var model = __nccwpck_require__(64908); + model.paginators = (__nccwpck_require__(57305)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(71106)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['cloudfront'], '2017-03-25', { + get: function get() { + var model = __nccwpck_require__(76944); + model.paginators = (__nccwpck_require__(83654)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(83406)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['cloudfront'], '2017-10-30', { + get: function get() { + var model = __nccwpck_require__(80198); + model.paginators = (__nccwpck_require__(52915)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(13399)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['cloudfront'], 
'2018-06-18', { + get: function get() { + var model = __nccwpck_require__(29549); + model.paginators = (__nccwpck_require__(7805)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(2353)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['cloudfront'], '2018-11-05', { + get: function get() { + var model = __nccwpck_require__(22253); + model.paginators = (__nccwpck_require__(29533)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(36883)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['cloudfront'], '2019-03-26', { + get: function get() { + var model = __nccwpck_require__(29574); + model.paginators = (__nccwpck_require__(35556)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(97142)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['cloudfront'], '2020-05-31', { + get: function get() { + var model = __nccwpck_require__(66310); + model.paginators = (__nccwpck_require__(48335)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(83517)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudFront; + + +/***/ }), + +/***/ 47859: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudfrontkeyvaluestore'] = {}; +AWS.CloudFrontKeyValueStore = Service.defineService('cloudfrontkeyvaluestore', ['2022-07-26']); +Object.defineProperty(apiLoader.services['cloudfrontkeyvaluestore'], '2022-07-26', { + get: function get() { + var model = __nccwpck_require__(49651); + model.paginators = (__nccwpck_require__(41274)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudFrontKeyValueStore; + + +/***/ }), + +/***/ 59976: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudhsm'] = {}; +AWS.CloudHSM = Service.defineService('cloudhsm', ['2014-05-30']); +Object.defineProperty(apiLoader.services['cloudhsm'], '2014-05-30', { + get: function get() { + var model = __nccwpck_require__(18637); + model.paginators = (__nccwpck_require__(18988)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudHSM; + + +/***/ }), + +/***/ 70889: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudhsmv2'] = {}; +AWS.CloudHSMV2 = Service.defineService('cloudhsmv2', ['2017-04-28']); +Object.defineProperty(apiLoader.services['cloudhsmv2'], '2017-04-28', { + get: function get() { + var model = __nccwpck_require__(90554); + model.paginators = (__nccwpck_require__(77334)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudHSMV2; + + +/***/ }), + +/***/ 72321: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = 
AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudsearch'] = {}; +AWS.CloudSearch = Service.defineService('cloudsearch', ['2011-02-01', '2013-01-01']); +Object.defineProperty(apiLoader.services['cloudsearch'], '2011-02-01', { + get: function get() { + var model = __nccwpck_require__(11732); + model.paginators = (__nccwpck_require__(51357)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['cloudsearch'], '2013-01-01', { + get: function get() { + var model = __nccwpck_require__(56880); + model.paginators = (__nccwpck_require__(81127)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudSearch; + + +/***/ }), + +/***/ 64072: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudsearchdomain'] = {}; +AWS.CloudSearchDomain = Service.defineService('cloudsearchdomain', ['2013-01-01']); +__nccwpck_require__(48571); +Object.defineProperty(apiLoader.services['cloudsearchdomain'], '2013-01-01', { + get: function get() { + var model = __nccwpck_require__(78255); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudSearchDomain; + + +/***/ }), + +/***/ 65512: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudtrail'] = {}; +AWS.CloudTrail = Service.defineService('cloudtrail', ['2013-11-01']); +Object.defineProperty(apiLoader.services['cloudtrail'], '2013-11-01', { + get: function get() { + var model = __nccwpck_require__(11506); + model.paginators = (__nccwpck_require__(27523)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudTrail; + + +/***/ }), + +/***/ 31191: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudtraildata'] = {}; +AWS.CloudTrailData = Service.defineService('cloudtraildata', ['2021-08-11']); +Object.defineProperty(apiLoader.services['cloudtraildata'], '2021-08-11', { + get: function get() { + var model = __nccwpck_require__(27372); + model.paginators = (__nccwpck_require__(79223)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudTrailData; + + +/***/ }), + +/***/ 6763: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudwatch'] = {}; +AWS.CloudWatch = Service.defineService('cloudwatch', ['2010-08-01']); +Object.defineProperty(apiLoader.services['cloudwatch'], '2010-08-01', { + get: function get() { + var model = __nccwpck_require__(16363); + model.paginators = (__nccwpck_require__(46675)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(21466)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudWatch; + + +/***/ }), + +/***/ 38124: +/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudwatchevents'] = {}; +AWS.CloudWatchEvents = Service.defineService('cloudwatchevents', ['2014-02-03*', '2015-10-07']); +Object.defineProperty(apiLoader.services['cloudwatchevents'], '2015-10-07', { + get: function get() { + var model = __nccwpck_require__(40299); + model.paginators = (__nccwpck_require__(54031)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudWatchEvents; + + +/***/ }), + +/***/ 96693: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cloudwatchlogs'] = {}; +AWS.CloudWatchLogs = Service.defineService('cloudwatchlogs', ['2014-03-28']); +Object.defineProperty(apiLoader.services['cloudwatchlogs'], '2014-03-28', { + get: function get() { + var model = __nccwpck_require__(73044); + model.paginators = (__nccwpck_require__(15472)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CloudWatchLogs; + + +/***/ }), + +/***/ 91983: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['codeartifact'] = {}; +AWS.CodeArtifact = Service.defineService('codeartifact', ['2018-09-22']); +Object.defineProperty(apiLoader.services['codeartifact'], '2018-09-22', { + get: function get() { + var model = __nccwpck_require__(87923); + model.paginators = (__nccwpck_require__(40983)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeArtifact; + + +/***/ }), + +/***/ 60450: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['codebuild'] = {}; +AWS.CodeBuild = Service.defineService('codebuild', ['2016-10-06']); +Object.defineProperty(apiLoader.services['codebuild'], '2016-10-06', { + get: function get() { + var model = __nccwpck_require__(40893); + model.paginators = (__nccwpck_require__(23010)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeBuild; + + +/***/ }), + +/***/ 19499: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['codecatalyst'] = {}; +AWS.CodeCatalyst = Service.defineService('codecatalyst', ['2022-09-28']); +Object.defineProperty(apiLoader.services['codecatalyst'], '2022-09-28', { + get: function get() { + var model = __nccwpck_require__(22999); + model.paginators = (__nccwpck_require__(14522)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(42522)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeCatalyst; + + +/***/ }), + +/***/ 71323: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var 
apiLoader = AWS.apiLoader; + +apiLoader.services['codecommit'] = {}; +AWS.CodeCommit = Service.defineService('codecommit', ['2015-04-13']); +Object.defineProperty(apiLoader.services['codecommit'], '2015-04-13', { + get: function get() { + var model = __nccwpck_require__(57144); + model.paginators = (__nccwpck_require__(62599)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeCommit; + + +/***/ }), + +/***/ 54599: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['codedeploy'] = {}; +AWS.CodeDeploy = Service.defineService('codedeploy', ['2014-10-06']); +Object.defineProperty(apiLoader.services['codedeploy'], '2014-10-06', { + get: function get() { + var model = __nccwpck_require__(10967); + model.paginators = (__nccwpck_require__(1917)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(52416)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeDeploy; + + +/***/ }), + +/***/ 65704: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['codeguruprofiler'] = {}; +AWS.CodeGuruProfiler = Service.defineService('codeguruprofiler', ['2019-07-18']); +Object.defineProperty(apiLoader.services['codeguruprofiler'], '2019-07-18', { + get: function get() { + var model = __nccwpck_require__(34890); + model.paginators = (__nccwpck_require__(25274)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeGuruProfiler; + + +/***/ }), + +/***/ 60070: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['codegurureviewer'] = {}; +AWS.CodeGuruReviewer = Service.defineService('codegurureviewer', ['2019-09-19']); +Object.defineProperty(apiLoader.services['codegurureviewer'], '2019-09-19', { + get: function get() { + var model = __nccwpck_require__(66739); + model.paginators = (__nccwpck_require__(37775)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(69276)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeGuruReviewer; + + +/***/ }), + +/***/ 32620: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['codegurusecurity'] = {}; +AWS.CodeGuruSecurity = Service.defineService('codegurusecurity', ['2018-05-10']); +Object.defineProperty(apiLoader.services['codegurusecurity'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(7662); + model.paginators = (__nccwpck_require__(77755)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeGuruSecurity; + + +/***/ }), + +/***/ 22938: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + 
+apiLoader.services['codepipeline'] = {}; +AWS.CodePipeline = Service.defineService('codepipeline', ['2015-07-09']); +Object.defineProperty(apiLoader.services['codepipeline'], '2015-07-09', { + get: function get() { + var model = __nccwpck_require__(4039); + model.paginators = (__nccwpck_require__(78953)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodePipeline; + + +/***/ }), + +/***/ 98336: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['codestar'] = {}; +AWS.CodeStar = Service.defineService('codestar', ['2017-04-19']); +Object.defineProperty(apiLoader.services['codestar'], '2017-04-19', { + get: function get() { + var model = __nccwpck_require__(12425); + model.paginators = (__nccwpck_require__(70046)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeStar; + + +/***/ }), + +/***/ 78270: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['codestarconnections'] = {}; +AWS.CodeStarconnections = Service.defineService('codestarconnections', ['2019-12-01']); +Object.defineProperty(apiLoader.services['codestarconnections'], '2019-12-01', { + get: function get() { + var model = __nccwpck_require__(88428); + model.paginators = (__nccwpck_require__(31506)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeStarconnections; + + +/***/ }), + +/***/ 15141: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['codestarnotifications'] = {}; +AWS.CodeStarNotifications = Service.defineService('codestarnotifications', ['2019-10-15']); +Object.defineProperty(apiLoader.services['codestarnotifications'], '2019-10-15', { + get: function get() { + var model = __nccwpck_require__(33362); + model.paginators = (__nccwpck_require__(44301)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CodeStarNotifications; + + +/***/ }), + +/***/ 58291: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cognitoidentity'] = {}; +AWS.CognitoIdentity = Service.defineService('cognitoidentity', ['2014-06-30']); +Object.defineProperty(apiLoader.services['cognitoidentity'], '2014-06-30', { + get: function get() { + var model = __nccwpck_require__(57377); + model.paginators = (__nccwpck_require__(85010)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CognitoIdentity; + + +/***/ }), + +/***/ 31379: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cognitoidentityserviceprovider'] = {}; +AWS.CognitoIdentityServiceProvider = 
Service.defineService('cognitoidentityserviceprovider', ['2016-04-18']); +Object.defineProperty(apiLoader.services['cognitoidentityserviceprovider'], '2016-04-18', { + get: function get() { + var model = __nccwpck_require__(53166); + model.paginators = (__nccwpck_require__(17149)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CognitoIdentityServiceProvider; + + +/***/ }), + +/***/ 74770: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cognitosync'] = {}; +AWS.CognitoSync = Service.defineService('cognitosync', ['2014-06-30']); +Object.defineProperty(apiLoader.services['cognitosync'], '2014-06-30', { + get: function get() { + var model = __nccwpck_require__(29128); + model.paginators = (__nccwpck_require__(5865)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CognitoSync; + + +/***/ }), + +/***/ 62878: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['comprehend'] = {}; +AWS.Comprehend = Service.defineService('comprehend', ['2017-11-27']); +Object.defineProperty(apiLoader.services['comprehend'], '2017-11-27', { + get: function get() { + var model = __nccwpck_require__(24433); + model.paginators = (__nccwpck_require__(82518)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Comprehend; + + +/***/ }), + +/***/ 32349: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['comprehendmedical'] = {}; +AWS.ComprehendMedical = Service.defineService('comprehendmedical', ['2018-10-30']); +Object.defineProperty(apiLoader.services['comprehendmedical'], '2018-10-30', { + get: function get() { + var model = __nccwpck_require__(96649); + model.paginators = (__nccwpck_require__(43172)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ComprehendMedical; + + +/***/ }), + +/***/ 64459: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['computeoptimizer'] = {}; +AWS.ComputeOptimizer = Service.defineService('computeoptimizer', ['2019-11-01']); +Object.defineProperty(apiLoader.services['computeoptimizer'], '2019-11-01', { + get: function get() { + var model = __nccwpck_require__(85802); + model.paginators = (__nccwpck_require__(6831)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ComputeOptimizer; + + +/***/ }), + +/***/ 34061: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['configservice'] = {}; +AWS.ConfigService = Service.defineService('configservice', ['2014-11-12']); +Object.defineProperty(apiLoader.services['configservice'], '2014-11-12', { + get: 
function get() { + var model = __nccwpck_require__(47124); + model.paginators = (__nccwpck_require__(85980)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ConfigService; + + +/***/ }), + +/***/ 13879: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['connect'] = {}; +AWS.Connect = Service.defineService('connect', ['2017-08-08']); +Object.defineProperty(apiLoader.services['connect'], '2017-08-08', { + get: function get() { + var model = __nccwpck_require__(54511); + model.paginators = (__nccwpck_require__(19742)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Connect; + + +/***/ }), + +/***/ 42789: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['connectcampaigns'] = {}; +AWS.ConnectCampaigns = Service.defineService('connectcampaigns', ['2021-01-30']); +Object.defineProperty(apiLoader.services['connectcampaigns'], '2021-01-30', { + get: function get() { + var model = __nccwpck_require__(71566); + model.paginators = (__nccwpck_require__(45198)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ConnectCampaigns; + + +/***/ }), + +/***/ 72223: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['connectcases'] = {}; +AWS.ConnectCases = Service.defineService('connectcases', ['2022-10-03']); +Object.defineProperty(apiLoader.services['connectcases'], '2022-10-03', { + get: function get() { + var model = __nccwpck_require__(3923); + model.paginators = (__nccwpck_require__(8429)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ConnectCases; + + +/***/ }), + +/***/ 41847: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['connectcontactlens'] = {}; +AWS.ConnectContactLens = Service.defineService('connectcontactlens', ['2020-08-21']); +Object.defineProperty(apiLoader.services['connectcontactlens'], '2020-08-21', { + get: function get() { + var model = __nccwpck_require__(16527); + model.paginators = (__nccwpck_require__(76658)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ConnectContactLens; + + +/***/ }), + +/***/ 94198: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['connectparticipant'] = {}; +AWS.ConnectParticipant = Service.defineService('connectparticipant', ['2018-09-07']); +Object.defineProperty(apiLoader.services['connectparticipant'], '2018-09-07', { + get: function get() { + var model = __nccwpck_require__(70132); + model.paginators = (__nccwpck_require__(29947)/* .pagination */ .o); + return model; + }, + enumerable: true, + 
configurable: true +}); + +module.exports = AWS.ConnectParticipant; + + +/***/ }), + +/***/ 77574: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['controltower'] = {}; +AWS.ControlTower = Service.defineService('controltower', ['2018-05-10']); +Object.defineProperty(apiLoader.services['controltower'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(1095); + model.paginators = (__nccwpck_require__(55167)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ControlTower; + + +/***/ }), + +/***/ 79523: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['costexplorer'] = {}; +AWS.CostExplorer = Service.defineService('costexplorer', ['2017-10-25']); +Object.defineProperty(apiLoader.services['costexplorer'], '2017-10-25', { + get: function get() { + var model = __nccwpck_require__(4060); + model.paginators = (__nccwpck_require__(75642)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CostExplorer; + + +/***/ }), + +/***/ 55443: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['costoptimizationhub'] = {}; +AWS.CostOptimizationHub = Service.defineService('costoptimizationhub', ['2022-07-26']); +Object.defineProperty(apiLoader.services['costoptimizationhub'], '2022-07-26', { + get: function get() { + var model = __nccwpck_require__(56073); + model.paginators = (__nccwpck_require__(70563)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CostOptimizationHub; + + +/***/ }), + +/***/ 5026: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['cur'] = {}; +AWS.CUR = Service.defineService('cur', ['2017-01-06']); +Object.defineProperty(apiLoader.services['cur'], '2017-01-06', { + get: function get() { + var model = __nccwpck_require__(46858); + model.paginators = (__nccwpck_require__(40528)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CUR; + + +/***/ }), + +/***/ 28379: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['customerprofiles'] = {}; +AWS.CustomerProfiles = Service.defineService('customerprofiles', ['2020-08-15']); +Object.defineProperty(apiLoader.services['customerprofiles'], '2020-08-15', { + get: function get() { + var model = __nccwpck_require__(56793); + model.paginators = (__nccwpck_require__(53892)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.CustomerProfiles; + + +/***/ }), + +/***/ 35846: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = 
__nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['databrew'] = {}; +AWS.DataBrew = Service.defineService('databrew', ['2017-07-25']); +Object.defineProperty(apiLoader.services['databrew'], '2017-07-25', { + get: function get() { + var model = __nccwpck_require__(96089); + model.paginators = (__nccwpck_require__(92224)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DataBrew; + + +/***/ }), + +/***/ 11024: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['dataexchange'] = {}; +AWS.DataExchange = Service.defineService('dataexchange', ['2017-07-25']); +Object.defineProperty(apiLoader.services['dataexchange'], '2017-07-25', { + get: function get() { + var model = __nccwpck_require__(42346); + model.paginators = (__nccwpck_require__(55607)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(43176)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DataExchange; + + +/***/ }), + +/***/ 65688: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['datapipeline'] = {}; +AWS.DataPipeline = Service.defineService('datapipeline', ['2012-10-29']); +Object.defineProperty(apiLoader.services['datapipeline'], '2012-10-29', { + get: function get() { + var model = __nccwpck_require__(79908); + model.paginators = (__nccwpck_require__(89659)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DataPipeline; + + +/***/ }), + +/***/ 25308: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['datasync'] = {}; +AWS.DataSync = Service.defineService('datasync', ['2018-11-09']); +Object.defineProperty(apiLoader.services['datasync'], '2018-11-09', { + get: function get() { + var model = __nccwpck_require__(93640); + model.paginators = (__nccwpck_require__(80063)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DataSync; + + +/***/ }), + +/***/ 31763: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['datazone'] = {}; +AWS.DataZone = Service.defineService('datazone', ['2018-05-10']); +Object.defineProperty(apiLoader.services['datazone'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(70813); + model.paginators = (__nccwpck_require__(10509)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DataZone; + + +/***/ }), + +/***/ 71398: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['dax'] = {}; +AWS.DAX = Service.defineService('dax', ['2017-04-19']); 
+Object.defineProperty(apiLoader.services['dax'], '2017-04-19', { + get: function get() { + var model = __nccwpck_require__(24709); + model.paginators = (__nccwpck_require__(87564)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DAX; + + +/***/ }), + +/***/ 60674: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['detective'] = {}; +AWS.Detective = Service.defineService('detective', ['2018-10-26']); +Object.defineProperty(apiLoader.services['detective'], '2018-10-26', { + get: function get() { + var model = __nccwpck_require__(25236); + model.paginators = (__nccwpck_require__(46384)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Detective; + + +/***/ }), + +/***/ 26272: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['devicefarm'] = {}; +AWS.DeviceFarm = Service.defineService('devicefarm', ['2015-06-23']); +Object.defineProperty(apiLoader.services['devicefarm'], '2015-06-23', { + get: function get() { + var model = __nccwpck_require__(34023); + model.paginators = (__nccwpck_require__(37161)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DeviceFarm; + + +/***/ }), + +/***/ 90673: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['devopsguru'] = {}; +AWS.DevOpsGuru = Service.defineService('devopsguru', ['2020-12-01']); +Object.defineProperty(apiLoader.services['devopsguru'], '2020-12-01', { + get: function get() { + var model = __nccwpck_require__(36592); + model.paginators = (__nccwpck_require__(95551)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DevOpsGuru; + + +/***/ }), + +/***/ 73783: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['directconnect'] = {}; +AWS.DirectConnect = Service.defineService('directconnect', ['2012-10-25']); +Object.defineProperty(apiLoader.services['directconnect'], '2012-10-25', { + get: function get() { + var model = __nccwpck_require__(45125); + model.paginators = (__nccwpck_require__(26404)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DirectConnect; + + +/***/ }), + +/***/ 83908: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['directoryservice'] = {}; +AWS.DirectoryService = Service.defineService('directoryservice', ['2015-04-16']); +Object.defineProperty(apiLoader.services['directoryservice'], '2015-04-16', { + get: function get() { + var model = __nccwpck_require__(47357); + model.paginators = (__nccwpck_require__(93412)/* .pagination */ .o); + return model; + }, + enumerable: true, + 
configurable: true +}); + +module.exports = AWS.DirectoryService; + + +/***/ }), + +/***/ 81690: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['discovery'] = {}; +AWS.Discovery = Service.defineService('discovery', ['2015-11-01']); +Object.defineProperty(apiLoader.services['discovery'], '2015-11-01', { + get: function get() { + var model = __nccwpck_require__(68951); + model.paginators = (__nccwpck_require__(19822)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Discovery; + + +/***/ }), + +/***/ 24958: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['dlm'] = {}; +AWS.DLM = Service.defineService('dlm', ['2018-01-12']); +Object.defineProperty(apiLoader.services['dlm'], '2018-01-12', { + get: function get() { + var model = __nccwpck_require__(75485); + model.paginators = (__nccwpck_require__(98881)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DLM; + + +/***/ }), + +/***/ 69868: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['dms'] = {}; +AWS.DMS = Service.defineService('dms', ['2016-01-01']); +Object.defineProperty(apiLoader.services['dms'], '2016-01-01', { + get: function get() { + var model = __nccwpck_require__(77953); + model.paginators = (__nccwpck_require__(36772)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(3500)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DMS; + + +/***/ }), + +/***/ 55129: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['docdb'] = {}; +AWS.DocDB = Service.defineService('docdb', ['2014-10-31']); +__nccwpck_require__(59050); +Object.defineProperty(apiLoader.services['docdb'], '2014-10-31', { + get: function get() { + var model = __nccwpck_require__(4932); + model.paginators = (__nccwpck_require__(41408)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(36607)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DocDB; + + +/***/ }), + +/***/ 20792: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['docdbelastic'] = {}; +AWS.DocDBElastic = Service.defineService('docdbelastic', ['2022-11-28']); +Object.defineProperty(apiLoader.services['docdbelastic'], '2022-11-28', { + get: function get() { + var model = __nccwpck_require__(34162); + model.paginators = (__nccwpck_require__(89093)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DocDBElastic; + + +/***/ }), + +/***/ 41116: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = 
__nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['drs'] = {}; +AWS.Drs = Service.defineService('drs', ['2020-02-26']); +Object.defineProperty(apiLoader.services['drs'], '2020-02-26', { + get: function get() { + var model = __nccwpck_require__(42548); + model.paginators = (__nccwpck_require__(44057)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Drs; + + +/***/ }), + +/***/ 14347: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['dynamodb'] = {}; +AWS.DynamoDB = Service.defineService('dynamodb', ['2011-12-05', '2012-08-10']); +__nccwpck_require__(17101); +Object.defineProperty(apiLoader.services['dynamodb'], '2011-12-05', { + get: function get() { + var model = __nccwpck_require__(46148); + model.paginators = (__nccwpck_require__(86884)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(24864)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['dynamodb'], '2012-08-10', { + get: function get() { + var model = __nccwpck_require__(54047); + model.paginators = (__nccwpck_require__(30482)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(48411)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DynamoDB; + + +/***/ }), + +/***/ 88090: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['dynamodbstreams'] = {}; +AWS.DynamoDBStreams = Service.defineService('dynamodbstreams', ['2012-08-10']); +Object.defineProperty(apiLoader.services['dynamodbstreams'], '2012-08-10', { + get: function get() { + var model = __nccwpck_require__(26098); + model.paginators = (__nccwpck_require__(40549)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.DynamoDBStreams; + + +/***/ }), + +/***/ 62837: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ebs'] = {}; +AWS.EBS = Service.defineService('ebs', ['2019-11-02']); +Object.defineProperty(apiLoader.services['ebs'], '2019-11-02', { + get: function get() { + var model = __nccwpck_require__(72220); + model.paginators = (__nccwpck_require__(85366)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.EBS; + + +/***/ }), + +/***/ 7778: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ec2'] = {}; +AWS.EC2 = Service.defineService('ec2', ['2013-06-15*', '2013-10-15*', '2014-02-01*', '2014-05-01*', '2014-06-15*', '2014-09-01*', '2014-10-01*', '2015-03-01*', '2015-04-15*', '2015-10-01*', '2016-04-01*', '2016-09-15*', '2016-11-15']); +__nccwpck_require__(92501); +Object.defineProperty(apiLoader.services['ec2'], '2016-11-15', { + get: function get() { + var model = __nccwpck_require__(2658); + model.paginators 
= (__nccwpck_require__(82477)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(19153)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.EC2; + + +/***/ }), + +/***/ 92209: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ec2instanceconnect'] = {}; +AWS.EC2InstanceConnect = Service.defineService('ec2instanceconnect', ['2018-04-02']); +Object.defineProperty(apiLoader.services['ec2instanceconnect'], '2018-04-02', { + get: function get() { + var model = __nccwpck_require__(36007); + model.paginators = (__nccwpck_require__(38333)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.EC2InstanceConnect; + + +/***/ }), + +/***/ 15211: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ecr'] = {}; +AWS.ECR = Service.defineService('ecr', ['2015-09-21']); +Object.defineProperty(apiLoader.services['ecr'], '2015-09-21', { + get: function get() { + var model = __nccwpck_require__(92405); + model.paginators = (__nccwpck_require__(25504)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(78925)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ECR; + + +/***/ }), + +/***/ 90244: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ecrpublic'] = {}; +AWS.ECRPUBLIC = Service.defineService('ecrpublic', ['2020-10-30']); +Object.defineProperty(apiLoader.services['ecrpublic'], '2020-10-30', { + get: function get() { + var model = __nccwpck_require__(9668); + model.paginators = (__nccwpck_require__(81193)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ECRPUBLIC; + + +/***/ }), + +/***/ 16615: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ecs'] = {}; +AWS.ECS = Service.defineService('ecs', ['2014-11-13']); +Object.defineProperty(apiLoader.services['ecs'], '2014-11-13', { + get: function get() { + var model = __nccwpck_require__(44208); + model.paginators = (__nccwpck_require__(15738)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(1299)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ECS; + + +/***/ }), + +/***/ 34375: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['efs'] = {}; +AWS.EFS = Service.defineService('efs', ['2015-02-01']); +Object.defineProperty(apiLoader.services['efs'], '2015-02-01', { + get: function get() { + var model = __nccwpck_require__(54784); + model.paginators = (__nccwpck_require__(40174)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.EFS; + + 
+/***/ }), + +/***/ 23337: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['eks'] = {}; +AWS.EKS = Service.defineService('eks', ['2017-11-01']); +Object.defineProperty(apiLoader.services['eks'], '2017-11-01', { + get: function get() { + var model = __nccwpck_require__(51370); + model.paginators = (__nccwpck_require__(36490)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(88058)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.EKS; + + +/***/ }), + +/***/ 66164: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['eksauth'] = {}; +AWS.EKSAuth = Service.defineService('eksauth', ['2023-11-26']); +Object.defineProperty(apiLoader.services['eksauth'], '2023-11-26', { + get: function get() { + var model = __nccwpck_require__(92414); + model.paginators = (__nccwpck_require__(8222)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(33480)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.EKSAuth; + + +/***/ }), + +/***/ 81065: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['elasticache'] = {}; +AWS.ElastiCache = Service.defineService('elasticache', ['2012-11-15*', '2014-03-24*', '2014-07-15*', '2014-09-30*', '2015-02-02']); +Object.defineProperty(apiLoader.services['elasticache'], '2015-02-02', { + get: function get() { + var model = __nccwpck_require__(58426); + model.paginators = (__nccwpck_require__(79559)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(29787)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ElastiCache; + + +/***/ }), + +/***/ 14897: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['elasticbeanstalk'] = {}; +AWS.ElasticBeanstalk = Service.defineService('elasticbeanstalk', ['2010-12-01']); +Object.defineProperty(apiLoader.services['elasticbeanstalk'], '2010-12-01', { + get: function get() { + var model = __nccwpck_require__(72508); + model.paginators = (__nccwpck_require__(72305)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(62534)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ElasticBeanstalk; + + +/***/ }), + +/***/ 37708: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['elasticinference'] = {}; +AWS.ElasticInference = Service.defineService('elasticinference', ['2017-07-25']); +Object.defineProperty(apiLoader.services['elasticinference'], '2017-07-25', { + get: function get() { + var model = __nccwpck_require__(83967); + model.paginators = (__nccwpck_require__(64906)/* .pagination */ .o); + return model; + }, + enumerable: true, + 
configurable: true +}); + +module.exports = AWS.ElasticInference; + + +/***/ }), + +/***/ 40745: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['elastictranscoder'] = {}; +AWS.ElasticTranscoder = Service.defineService('elastictranscoder', ['2012-09-25']); +Object.defineProperty(apiLoader.services['elastictranscoder'], '2012-09-25', { + get: function get() { + var model = __nccwpck_require__(23463); + model.paginators = (__nccwpck_require__(36121)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(59345)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ElasticTranscoder; + + +/***/ }), + +/***/ 10907: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['elb'] = {}; +AWS.ELB = Service.defineService('elb', ['2012-06-01']); +Object.defineProperty(apiLoader.services['elb'], '2012-06-01', { + get: function get() { + var model = __nccwpck_require__(66258); + model.paginators = (__nccwpck_require__(77372)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(56717)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ELB; + + +/***/ }), + +/***/ 44311: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['elbv2'] = {}; +AWS.ELBv2 = Service.defineService('elbv2', ['2015-12-01']); +Object.defineProperty(apiLoader.services['elbv2'], '2015-12-01', { + get: function get() { + var model = __nccwpck_require__(42628); + model.paginators = (__nccwpck_require__(12274)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(56106)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ELBv2; + + +/***/ }), + +/***/ 50470: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['emr'] = {}; +AWS.EMR = Service.defineService('emr', ['2009-03-31']); +Object.defineProperty(apiLoader.services['emr'], '2009-03-31', { + get: function get() { + var model = __nccwpck_require__(91298); + model.paginators = (__nccwpck_require__(62965)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(86792)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.EMR; + + +/***/ }), + +/***/ 49984: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['emrcontainers'] = {}; +AWS.EMRcontainers = Service.defineService('emrcontainers', ['2020-10-01']); +Object.defineProperty(apiLoader.services['emrcontainers'], '2020-10-01', { + get: function get() { + var model = __nccwpck_require__(33922); + model.paginators = (__nccwpck_require__(87789)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = 
AWS.EMRcontainers; + + +/***/ }), + +/***/ 219: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['emrserverless'] = {}; +AWS.EMRServerless = Service.defineService('emrserverless', ['2021-07-13']); +Object.defineProperty(apiLoader.services['emrserverless'], '2021-07-13', { + get: function get() { + var model = __nccwpck_require__(41070); + model.paginators = (__nccwpck_require__(39521)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.EMRServerless; + + +/***/ }), + +/***/ 22697: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['entityresolution'] = {}; +AWS.EntityResolution = Service.defineService('entityresolution', ['2018-05-10']); +Object.defineProperty(apiLoader.services['entityresolution'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(61033); + model.paginators = (__nccwpck_require__(37403)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.EntityResolution; + + +/***/ }), + +/***/ 84462: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['es'] = {}; +AWS.ES = Service.defineService('es', ['2015-01-01']); +Object.defineProperty(apiLoader.services['es'], '2015-01-01', { + get: function get() { + var model = __nccwpck_require__(33943); + model.paginators = (__nccwpck_require__(78836)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ES; + + +/***/ }), + +/***/ 898: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['eventbridge'] = {}; +AWS.EventBridge = Service.defineService('eventbridge', ['2015-10-07']); +__nccwpck_require__(3034); +Object.defineProperty(apiLoader.services['eventbridge'], '2015-10-07', { + get: function get() { + var model = __nccwpck_require__(9659); + model.paginators = (__nccwpck_require__(10871)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.EventBridge; + + +/***/ }), + +/***/ 21440: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['evidently'] = {}; +AWS.Evidently = Service.defineService('evidently', ['2021-02-01']); +Object.defineProperty(apiLoader.services['evidently'], '2021-02-01', { + get: function get() { + var model = __nccwpck_require__(41971); + model.paginators = (__nccwpck_require__(72960)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Evidently; + + +/***/ }), + +/***/ 3052: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = 
AWS.apiLoader; + +apiLoader.services['finspace'] = {}; +AWS.Finspace = Service.defineService('finspace', ['2021-03-12']); +Object.defineProperty(apiLoader.services['finspace'], '2021-03-12', { + get: function get() { + var model = __nccwpck_require__(37836); + model.paginators = (__nccwpck_require__(7328)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Finspace; + + +/***/ }), + +/***/ 96869: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['finspacedata'] = {}; +AWS.Finspacedata = Service.defineService('finspacedata', ['2020-07-13']); +Object.defineProperty(apiLoader.services['finspacedata'], '2020-07-13', { + get: function get() { + var model = __nccwpck_require__(83394); + model.paginators = (__nccwpck_require__(70371)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Finspacedata; + + +/***/ }), + +/***/ 92831: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['firehose'] = {}; +AWS.Firehose = Service.defineService('firehose', ['2015-08-04']); +Object.defineProperty(apiLoader.services['firehose'], '2015-08-04', { + get: function get() { + var model = __nccwpck_require__(48886); + model.paginators = (__nccwpck_require__(47400)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Firehose; + + +/***/ }), + +/***/ 73003: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['fis'] = {}; +AWS.Fis = Service.defineService('fis', ['2020-12-01']); +Object.defineProperty(apiLoader.services['fis'], '2020-12-01', { + get: function get() { + var model = __nccwpck_require__(98356); + model.paginators = (__nccwpck_require__(6544)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Fis; + + +/***/ }), + +/***/ 11316: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['fms'] = {}; +AWS.FMS = Service.defineService('fms', ['2018-01-01']); +Object.defineProperty(apiLoader.services['fms'], '2018-01-01', { + get: function get() { + var model = __nccwpck_require__(22212); + model.paginators = (__nccwpck_require__(49570)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.FMS; + + +/***/ }), + +/***/ 36822: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['forecastqueryservice'] = {}; +AWS.ForecastQueryService = Service.defineService('forecastqueryservice', ['2018-06-26']); +Object.defineProperty(apiLoader.services['forecastqueryservice'], '2018-06-26', { + get: function get() { + var model = __nccwpck_require__(23865); + model.paginators = 
(__nccwpck_require__(98135)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ForecastQueryService; + + +/***/ }), + +/***/ 12942: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['forecastservice'] = {}; +AWS.ForecastService = Service.defineService('forecastservice', ['2018-06-26']); +Object.defineProperty(apiLoader.services['forecastservice'], '2018-06-26', { + get: function get() { + var model = __nccwpck_require__(6468); + model.paginators = (__nccwpck_require__(45338)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ForecastService; + + +/***/ }), + +/***/ 99830: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['frauddetector'] = {}; +AWS.FraudDetector = Service.defineService('frauddetector', ['2019-11-15']); +Object.defineProperty(apiLoader.services['frauddetector'], '2019-11-15', { + get: function get() { + var model = __nccwpck_require__(96105); + model.paginators = (__nccwpck_require__(9177)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.FraudDetector; + + +/***/ }), + +/***/ 11681: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['freetier'] = {}; +AWS.FreeTier = Service.defineService('freetier', ['2023-09-07']); +Object.defineProperty(apiLoader.services['freetier'], '2023-09-07', { + get: function get() { + var model = __nccwpck_require__(50797); + model.paginators = (__nccwpck_require__(52266)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.FreeTier; + + +/***/ }), + +/***/ 60642: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['fsx'] = {}; +AWS.FSx = Service.defineService('fsx', ['2018-03-01']); +Object.defineProperty(apiLoader.services['fsx'], '2018-03-01', { + get: function get() { + var model = __nccwpck_require__(58245); + model.paginators = (__nccwpck_require__(19882)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.FSx; + + +/***/ }), + +/***/ 8085: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['gamelift'] = {}; +AWS.GameLift = Service.defineService('gamelift', ['2015-10-01']); +Object.defineProperty(apiLoader.services['gamelift'], '2015-10-01', { + get: function get() { + var model = __nccwpck_require__(69257); + model.paginators = (__nccwpck_require__(88381)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.GameLift; + + +/***/ }), + +/***/ 63249: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + 
+__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['glacier'] = {}; +AWS.Glacier = Service.defineService('glacier', ['2012-06-01']); +__nccwpck_require__(14472); +Object.defineProperty(apiLoader.services['glacier'], '2012-06-01', { + get: function get() { + var model = __nccwpck_require__(11545); + model.paginators = (__nccwpck_require__(54145)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(65182)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Glacier; + + +/***/ }), + +/***/ 19306: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['globalaccelerator'] = {}; +AWS.GlobalAccelerator = Service.defineService('globalaccelerator', ['2018-08-08']); +Object.defineProperty(apiLoader.services['globalaccelerator'], '2018-08-08', { + get: function get() { + var model = __nccwpck_require__(35365); + model.paginators = (__nccwpck_require__(14796)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.GlobalAccelerator; + + +/***/ }), + +/***/ 31658: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['glue'] = {}; +AWS.Glue = Service.defineService('glue', ['2017-03-31']); +Object.defineProperty(apiLoader.services['glue'], '2017-03-31', { + get: function get() { + var model = __nccwpck_require__(72268); + model.paginators = (__nccwpck_require__(26545)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Glue; + + +/***/ }), + +/***/ 51050: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['grafana'] = {}; +AWS.Grafana = Service.defineService('grafana', ['2020-08-18']); +Object.defineProperty(apiLoader.services['grafana'], '2020-08-18', { + get: function get() { + var model = __nccwpck_require__(29655); + model.paginators = (__nccwpck_require__(83188)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Grafana; + + +/***/ }), + +/***/ 20690: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['greengrass'] = {}; +AWS.Greengrass = Service.defineService('greengrass', ['2017-06-07']); +Object.defineProperty(apiLoader.services['greengrass'], '2017-06-07', { + get: function get() { + var model = __nccwpck_require__(72575); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Greengrass; + + +/***/ }), + +/***/ 45126: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['greengrassv2'] = {}; +AWS.GreengrassV2 = Service.defineService('greengrassv2', ['2020-11-30']); 
+Object.defineProperty(apiLoader.services['greengrassv2'], '2020-11-30', { + get: function get() { + var model = __nccwpck_require__(57546); + model.paginators = (__nccwpck_require__(47961)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.GreengrassV2; + + +/***/ }), + +/***/ 80494: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['groundstation'] = {}; +AWS.GroundStation = Service.defineService('groundstation', ['2019-05-23']); +Object.defineProperty(apiLoader.services['groundstation'], '2019-05-23', { + get: function get() { + var model = __nccwpck_require__(27733); + model.paginators = (__nccwpck_require__(55974)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(77815)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.GroundStation; + + +/***/ }), + +/***/ 40755: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['guardduty'] = {}; +AWS.GuardDuty = Service.defineService('guardduty', ['2017-11-28']); +Object.defineProperty(apiLoader.services['guardduty'], '2017-11-28', { + get: function get() { + var model = __nccwpck_require__(37793); + model.paginators = (__nccwpck_require__(87510)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.GuardDuty; + + +/***/ }), + +/***/ 21834: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['health'] = {}; +AWS.Health = Service.defineService('health', ['2016-08-04']); +Object.defineProperty(apiLoader.services['health'], '2016-08-04', { + get: function get() { + var model = __nccwpck_require__(8618); + model.paginators = (__nccwpck_require__(46725)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Health; + + +/***/ }), + +/***/ 64254: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['healthlake'] = {}; +AWS.HealthLake = Service.defineService('healthlake', ['2017-07-01']); +Object.defineProperty(apiLoader.services['healthlake'], '2017-07-01', { + get: function get() { + var model = __nccwpck_require__(13637); + model.paginators = (__nccwpck_require__(92834)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.HealthLake; + + +/***/ }), + +/***/ 38889: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['honeycode'] = {}; +AWS.Honeycode = Service.defineService('honeycode', ['2020-03-01']); +Object.defineProperty(apiLoader.services['honeycode'], '2020-03-01', { + get: function get() { + var model = __nccwpck_require__(27577); + model.paginators = (__nccwpck_require__(12243)/* .pagination */ .o); + return 
model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Honeycode; + + +/***/ }), + +/***/ 50058: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iam'] = {}; +AWS.IAM = Service.defineService('iam', ['2010-05-08']); +Object.defineProperty(apiLoader.services['iam'], '2010-05-08', { + get: function get() { + var model = __nccwpck_require__(27041); + model.paginators = (__nccwpck_require__(97583)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(37757)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IAM; + + +/***/ }), + +/***/ 60222: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['identitystore'] = {}; +AWS.IdentityStore = Service.defineService('identitystore', ['2020-06-15']); +Object.defineProperty(apiLoader.services['identitystore'], '2020-06-15', { + get: function get() { + var model = __nccwpck_require__(75797); + model.paginators = (__nccwpck_require__(44872)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IdentityStore; + + +/***/ }), + +/***/ 57511: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['imagebuilder'] = {}; +AWS.Imagebuilder = Service.defineService('imagebuilder', ['2019-12-02']); +Object.defineProperty(apiLoader.services['imagebuilder'], '2019-12-02', { + get: function get() { + var model = __nccwpck_require__(98139); + model.paginators = (__nccwpck_require__(60410)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Imagebuilder; + + +/***/ }), + +/***/ 6769: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['importexport'] = {}; +AWS.ImportExport = Service.defineService('importexport', ['2010-06-01']); +Object.defineProperty(apiLoader.services['importexport'], '2010-06-01', { + get: function get() { + var model = __nccwpck_require__(80317); + model.paginators = (__nccwpck_require__(58037)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ImportExport; + + +/***/ }), + +/***/ 89439: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['inspector'] = {}; +AWS.Inspector = Service.defineService('inspector', ['2015-08-18*', '2016-02-16']); +Object.defineProperty(apiLoader.services['inspector'], '2016-02-16', { + get: function get() { + var model = __nccwpck_require__(71649); + model.paginators = (__nccwpck_require__(69242)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Inspector; + + +/***/ }), + +/***/ 98650: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + 
+__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['inspector2'] = {}; +AWS.Inspector2 = Service.defineService('inspector2', ['2020-06-08']); +Object.defineProperty(apiLoader.services['inspector2'], '2020-06-08', { + get: function get() { + var model = __nccwpck_require__(61291); + model.paginators = (__nccwpck_require__(17472)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Inspector2; + + +/***/ }), + +/***/ 25467: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['inspectorscan'] = {}; +AWS.InspectorScan = Service.defineService('inspectorscan', ['2023-08-08']); +Object.defineProperty(apiLoader.services['inspectorscan'], '2023-08-08', { + get: function get() { + var model = __nccwpck_require__(17875); + model.paginators = (__nccwpck_require__(35037)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.InspectorScan; + + +/***/ }), + +/***/ 84099: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['internetmonitor'] = {}; +AWS.InternetMonitor = Service.defineService('internetmonitor', ['2021-06-03']); +Object.defineProperty(apiLoader.services['internetmonitor'], '2021-06-03', { + get: function get() { + var model = __nccwpck_require__(62158); + model.paginators = (__nccwpck_require__(64409)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(76543)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.InternetMonitor; + + +/***/ }), + +/***/ 98392: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iot'] = {}; +AWS.Iot = Service.defineService('iot', ['2015-05-28']); +Object.defineProperty(apiLoader.services['iot'], '2015-05-28', { + get: function get() { + var model = __nccwpck_require__(40063); + model.paginators = (__nccwpck_require__(43999)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Iot; + + +/***/ }), + +/***/ 39474: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iot1clickdevicesservice'] = {}; +AWS.IoT1ClickDevicesService = Service.defineService('iot1clickdevicesservice', ['2018-05-14']); +Object.defineProperty(apiLoader.services['iot1clickdevicesservice'], '2018-05-14', { + get: function get() { + var model = __nccwpck_require__(26663); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoT1ClickDevicesService; + + +/***/ }), + +/***/ 4686: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iot1clickprojects'] = {}; +AWS.IoT1ClickProjects = 
Service.defineService('iot1clickprojects', ['2018-05-14']); +Object.defineProperty(apiLoader.services['iot1clickprojects'], '2018-05-14', { + get: function get() { + var model = __nccwpck_require__(17364); + model.paginators = (__nccwpck_require__(54033)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoT1ClickProjects; + + +/***/ }), + +/***/ 67409: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotanalytics'] = {}; +AWS.IoTAnalytics = Service.defineService('iotanalytics', ['2017-11-27']); +Object.defineProperty(apiLoader.services['iotanalytics'], '2017-11-27', { + get: function get() { + var model = __nccwpck_require__(84609); + model.paginators = (__nccwpck_require__(45498)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTAnalytics; + + +/***/ }), + +/***/ 6564: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotdata'] = {}; +AWS.IotData = Service.defineService('iotdata', ['2015-05-28']); +__nccwpck_require__(27062); +Object.defineProperty(apiLoader.services['iotdata'], '2015-05-28', { + get: function get() { + var model = __nccwpck_require__(21717); + model.paginators = (__nccwpck_require__(31896)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IotData; + + +/***/ }), + +/***/ 97569: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotdeviceadvisor'] = {}; +AWS.IotDeviceAdvisor = Service.defineService('iotdeviceadvisor', ['2020-09-18']); +Object.defineProperty(apiLoader.services['iotdeviceadvisor'], '2020-09-18', { + get: function get() { + var model = __nccwpck_require__(71394); + model.paginators = (__nccwpck_require__(49057)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IotDeviceAdvisor; + + +/***/ }), + +/***/ 88065: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotevents'] = {}; +AWS.IoTEvents = Service.defineService('iotevents', ['2018-07-27']); +Object.defineProperty(apiLoader.services['iotevents'], '2018-07-27', { + get: function get() { + var model = __nccwpck_require__(4483); + model.paginators = (__nccwpck_require__(39844)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTEvents; + + +/***/ }), + +/***/ 56973: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ioteventsdata'] = {}; +AWS.IoTEventsData = Service.defineService('ioteventsdata', ['2018-10-23']); +Object.defineProperty(apiLoader.services['ioteventsdata'], '2018-10-23', { + get: function get() { + var model = __nccwpck_require__(94282); + 
model.paginators = (__nccwpck_require__(11632)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTEventsData; + + +/***/ }), + +/***/ 42513: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotfleethub'] = {}; +AWS.IoTFleetHub = Service.defineService('iotfleethub', ['2020-11-03']); +Object.defineProperty(apiLoader.services['iotfleethub'], '2020-11-03', { + get: function get() { + var model = __nccwpck_require__(56534); + model.paginators = (__nccwpck_require__(76120)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTFleetHub; + + +/***/ }), + +/***/ 94329: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotfleetwise'] = {}; +AWS.IoTFleetWise = Service.defineService('iotfleetwise', ['2021-06-17']); +Object.defineProperty(apiLoader.services['iotfleetwise'], '2021-06-17', { + get: function get() { + var model = __nccwpck_require__(68937); + model.paginators = (__nccwpck_require__(85715)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(23391)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTFleetWise; + + +/***/ }), + +/***/ 42332: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotjobsdataplane'] = {}; +AWS.IoTJobsDataPlane = Service.defineService('iotjobsdataplane', ['2017-09-29']); +Object.defineProperty(apiLoader.services['iotjobsdataplane'], '2017-09-29', { + get: function get() { + var model = __nccwpck_require__(12147); + model.paginators = (__nccwpck_require__(58593)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTJobsDataPlane; + + +/***/ }), + +/***/ 22163: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotroborunner'] = {}; +AWS.IoTRoboRunner = Service.defineService('iotroborunner', ['2018-05-10']); +Object.defineProperty(apiLoader.services['iotroborunner'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(11483); + model.paginators = (__nccwpck_require__(82393)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTRoboRunner; + + +/***/ }), + +/***/ 98562: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotsecuretunneling'] = {}; +AWS.IoTSecureTunneling = Service.defineService('iotsecuretunneling', ['2018-10-05']); +Object.defineProperty(apiLoader.services['iotsecuretunneling'], '2018-10-05', { + get: function get() { + var model = __nccwpck_require__(99946); + model.paginators = (__nccwpck_require__(97884)/* .pagination */ .o); + return model; + }, + enumerable: true, + 
configurable: true +}); + +module.exports = AWS.IoTSecureTunneling; + + +/***/ }), + +/***/ 89690: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotsitewise'] = {}; +AWS.IoTSiteWise = Service.defineService('iotsitewise', ['2019-12-02']); +Object.defineProperty(apiLoader.services['iotsitewise'], '2019-12-02', { + get: function get() { + var model = __nccwpck_require__(44429); + model.paginators = (__nccwpck_require__(27558)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(80458)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTSiteWise; + + +/***/ }), + +/***/ 58905: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotthingsgraph'] = {}; +AWS.IoTThingsGraph = Service.defineService('iotthingsgraph', ['2018-09-06']); +Object.defineProperty(apiLoader.services['iotthingsgraph'], '2018-09-06', { + get: function get() { + var model = __nccwpck_require__(84893); + model.paginators = (__nccwpck_require__(99418)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTThingsGraph; + + +/***/ }), + +/***/ 65010: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iottwinmaker'] = {}; +AWS.IoTTwinMaker = Service.defineService('iottwinmaker', ['2021-11-29']); +Object.defineProperty(apiLoader.services['iottwinmaker'], '2021-11-29', { + get: function get() { + var model = __nccwpck_require__(30382); + model.paginators = (__nccwpck_require__(93389)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(41496)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTTwinMaker; + + +/***/ }), + +/***/ 8226: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['iotwireless'] = {}; +AWS.IoTWireless = Service.defineService('iotwireless', ['2020-11-22']); +Object.defineProperty(apiLoader.services['iotwireless'], '2020-11-22', { + get: function get() { + var model = __nccwpck_require__(78052); + model.paginators = (__nccwpck_require__(13156)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IoTWireless; + + +/***/ }), + +/***/ 67701: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ivs'] = {}; +AWS.IVS = Service.defineService('ivs', ['2020-07-14']); +Object.defineProperty(apiLoader.services['ivs'], '2020-07-14', { + get: function get() { + var model = __nccwpck_require__(34175); + model.paginators = (__nccwpck_require__(45289)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IVS; + + +/***/ }), + +/***/ 17077: +/***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ivschat'] = {}; +AWS.Ivschat = Service.defineService('ivschat', ['2020-07-14']); +Object.defineProperty(apiLoader.services['ivschat'], '2020-07-14', { + get: function get() { + var model = __nccwpck_require__(77512); + model.paginators = (__nccwpck_require__(85556)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Ivschat; + + +/***/ }), + +/***/ 51946: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ivsrealtime'] = {}; +AWS.IVSRealTime = Service.defineService('ivsrealtime', ['2020-07-14']); +Object.defineProperty(apiLoader.services['ivsrealtime'], '2020-07-14', { + get: function get() { + var model = __nccwpck_require__(23084); + model.paginators = (__nccwpck_require__(64507)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.IVSRealTime; + + +/***/ }), + +/***/ 56775: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kafka'] = {}; +AWS.Kafka = Service.defineService('kafka', ['2018-11-14']); +Object.defineProperty(apiLoader.services['kafka'], '2018-11-14', { + get: function get() { + var model = __nccwpck_require__(38473); + model.paginators = (__nccwpck_require__(79729)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Kafka; + + +/***/ }), + +/***/ 61879: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kafkaconnect'] = {}; +AWS.KafkaConnect = Service.defineService('kafkaconnect', ['2021-09-14']); +Object.defineProperty(apiLoader.services['kafkaconnect'], '2021-09-14', { + get: function get() { + var model = __nccwpck_require__(80867); + model.paginators = (__nccwpck_require__(32924)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.KafkaConnect; + + +/***/ }), + +/***/ 66122: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kendra'] = {}; +AWS.Kendra = Service.defineService('kendra', ['2019-02-03']); +Object.defineProperty(apiLoader.services['kendra'], '2019-02-03', { + get: function get() { + var model = __nccwpck_require__(80100); + model.paginators = (__nccwpck_require__(64519)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Kendra; + + +/***/ }), + +/***/ 46255: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kendraranking'] = {}; +AWS.KendraRanking = Service.defineService('kendraranking', ['2022-10-19']); 
+Object.defineProperty(apiLoader.services['kendraranking'], '2022-10-19', { + get: function get() { + var model = __nccwpck_require__(66044); + model.paginators = (__nccwpck_require__(38563)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.KendraRanking; + + +/***/ }), + +/***/ 24789: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['keyspaces'] = {}; +AWS.Keyspaces = Service.defineService('keyspaces', ['2022-02-10']); +Object.defineProperty(apiLoader.services['keyspaces'], '2022-02-10', { + get: function get() { + var model = __nccwpck_require__(59857); + model.paginators = (__nccwpck_require__(19252)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(53164)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Keyspaces; + + +/***/ }), + +/***/ 49876: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kinesis'] = {}; +AWS.Kinesis = Service.defineService('kinesis', ['2013-12-02']); +Object.defineProperty(apiLoader.services['kinesis'], '2013-12-02', { + get: function get() { + var model = __nccwpck_require__(648); + model.paginators = (__nccwpck_require__(10424)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(54059)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Kinesis; + + +/***/ }), + +/***/ 90042: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kinesisanalytics'] = {}; +AWS.KinesisAnalytics = Service.defineService('kinesisanalytics', ['2015-08-14']); +Object.defineProperty(apiLoader.services['kinesisanalytics'], '2015-08-14', { + get: function get() { + var model = __nccwpck_require__(72653); + model.paginators = (__nccwpck_require__(73535)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.KinesisAnalytics; + + +/***/ }), + +/***/ 74631: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kinesisanalyticsv2'] = {}; +AWS.KinesisAnalyticsV2 = Service.defineService('kinesisanalyticsv2', ['2018-05-23']); +Object.defineProperty(apiLoader.services['kinesisanalyticsv2'], '2018-05-23', { + get: function get() { + var model = __nccwpck_require__(56485); + model.paginators = (__nccwpck_require__(52495)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.KinesisAnalyticsV2; + + +/***/ }), + +/***/ 89927: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kinesisvideo'] = {}; +AWS.KinesisVideo = Service.defineService('kinesisvideo', ['2017-09-30']); +Object.defineProperty(apiLoader.services['kinesisvideo'], '2017-09-30', { + get: 
function get() { + var model = __nccwpck_require__(96305); + model.paginators = (__nccwpck_require__(50061)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.KinesisVideo; + + +/***/ }), + +/***/ 5580: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kinesisvideoarchivedmedia'] = {}; +AWS.KinesisVideoArchivedMedia = Service.defineService('kinesisvideoarchivedmedia', ['2017-09-30']); +Object.defineProperty(apiLoader.services['kinesisvideoarchivedmedia'], '2017-09-30', { + get: function get() { + var model = __nccwpck_require__(78868); + model.paginators = (__nccwpck_require__(27352)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.KinesisVideoArchivedMedia; + + +/***/ }), + +/***/ 81308: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kinesisvideomedia'] = {}; +AWS.KinesisVideoMedia = Service.defineService('kinesisvideomedia', ['2017-09-30']); +Object.defineProperty(apiLoader.services['kinesisvideomedia'], '2017-09-30', { + get: function get() { + var model = __nccwpck_require__(18898); + model.paginators = (__nccwpck_require__(85061)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.KinesisVideoMedia; + + +/***/ }), + +/***/ 12710: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kinesisvideosignalingchannels'] = {}; +AWS.KinesisVideoSignalingChannels = Service.defineService('kinesisvideosignalingchannels', ['2019-12-04']); +Object.defineProperty(apiLoader.services['kinesisvideosignalingchannels'], '2019-12-04', { + get: function get() { + var model = __nccwpck_require__(89769); + model.paginators = (__nccwpck_require__(41939)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.KinesisVideoSignalingChannels; + + +/***/ }), + +/***/ 52642: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kinesisvideowebrtcstorage'] = {}; +AWS.KinesisVideoWebRTCStorage = Service.defineService('kinesisvideowebrtcstorage', ['2018-05-10']); +Object.defineProperty(apiLoader.services['kinesisvideowebrtcstorage'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(62761); + model.paginators = (__nccwpck_require__(3540)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.KinesisVideoWebRTCStorage; + + +/***/ }), + +/***/ 56782: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['kms'] = {}; +AWS.KMS = Service.defineService('kms', ['2014-11-01']); +Object.defineProperty(apiLoader.services['kms'], '2014-11-01', { + get: function get() { + var 
model = __nccwpck_require__(1219); + model.paginators = (__nccwpck_require__(71402)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.KMS; + + +/***/ }), + +/***/ 6726: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['lakeformation'] = {}; +AWS.LakeFormation = Service.defineService('lakeformation', ['2017-03-31']); +Object.defineProperty(apiLoader.services['lakeformation'], '2017-03-31', { + get: function get() { + var model = __nccwpck_require__(82210); + model.paginators = (__nccwpck_require__(61488)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LakeFormation; + + +/***/ }), + +/***/ 13321: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['lambda'] = {}; +AWS.Lambda = Service.defineService('lambda', ['2014-11-11', '2015-03-31']); +__nccwpck_require__(8452); +Object.defineProperty(apiLoader.services['lambda'], '2014-11-11', { + get: function get() { + var model = __nccwpck_require__(91251); + model.paginators = (__nccwpck_require__(79210)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); +Object.defineProperty(apiLoader.services['lambda'], '2015-03-31', { + get: function get() { + var model = __nccwpck_require__(29103); + model.paginators = (__nccwpck_require__(32057)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(40626)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Lambda; + + +/***/ }), + +/***/ 71060: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['launchwizard'] = {}; +AWS.LaunchWizard = Service.defineService('launchwizard', ['2018-05-10']); +Object.defineProperty(apiLoader.services['launchwizard'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(67108); + model.paginators = (__nccwpck_require__(50601)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LaunchWizard; + + +/***/ }), + +/***/ 37397: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['lexmodelbuildingservice'] = {}; +AWS.LexModelBuildingService = Service.defineService('lexmodelbuildingservice', ['2017-04-19']); +Object.defineProperty(apiLoader.services['lexmodelbuildingservice'], '2017-04-19', { + get: function get() { + var model = __nccwpck_require__(96327); + model.paginators = (__nccwpck_require__(12348)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LexModelBuildingService; + + +/***/ }), + +/***/ 27254: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['lexmodelsv2'] = {}; 
+AWS.LexModelsV2 = Service.defineService('lexmodelsv2', ['2020-08-07']); +Object.defineProperty(apiLoader.services['lexmodelsv2'], '2020-08-07', { + get: function get() { + var model = __nccwpck_require__(98781); + model.paginators = (__nccwpck_require__(49461)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(55520)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LexModelsV2; + + +/***/ }), + +/***/ 62716: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['lexruntime'] = {}; +AWS.LexRuntime = Service.defineService('lexruntime', ['2016-11-28']); +Object.defineProperty(apiLoader.services['lexruntime'], '2016-11-28', { + get: function get() { + var model = __nccwpck_require__(11059); + model.paginators = (__nccwpck_require__(97715)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LexRuntime; + + +/***/ }), + +/***/ 33855: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['lexruntimev2'] = {}; +AWS.LexRuntimeV2 = Service.defineService('lexruntimev2', ['2020-08-07']); +Object.defineProperty(apiLoader.services['lexruntimev2'], '2020-08-07', { + get: function get() { + var model = __nccwpck_require__(17908); + model.paginators = (__nccwpck_require__(469)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LexRuntimeV2; + + +/***/ }), + +/***/ 34693: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['licensemanager'] = {}; +AWS.LicenseManager = Service.defineService('licensemanager', ['2018-08-01']); +Object.defineProperty(apiLoader.services['licensemanager'], '2018-08-01', { + get: function get() { + var model = __nccwpck_require__(19160); + model.paginators = (__nccwpck_require__(77552)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LicenseManager; + + +/***/ }), + +/***/ 52687: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['licensemanagerlinuxsubscriptions'] = {}; +AWS.LicenseManagerLinuxSubscriptions = Service.defineService('licensemanagerlinuxsubscriptions', ['2018-05-10']); +Object.defineProperty(apiLoader.services['licensemanagerlinuxsubscriptions'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(94260); + model.paginators = (__nccwpck_require__(60467)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LicenseManagerLinuxSubscriptions; + + +/***/ }), + +/***/ 37725: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['licensemanagerusersubscriptions'] = {}; +AWS.LicenseManagerUserSubscriptions = 
Service.defineService('licensemanagerusersubscriptions', ['2018-05-10']); +Object.defineProperty(apiLoader.services['licensemanagerusersubscriptions'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(48338); + model.paginators = (__nccwpck_require__(84416)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LicenseManagerUserSubscriptions; + + +/***/ }), + +/***/ 22718: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['lightsail'] = {}; +AWS.Lightsail = Service.defineService('lightsail', ['2016-11-28']); +Object.defineProperty(apiLoader.services['lightsail'], '2016-11-28', { + get: function get() { + var model = __nccwpck_require__(94784); + model.paginators = (__nccwpck_require__(17528)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Lightsail; + + +/***/ }), + +/***/ 44594: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['location'] = {}; +AWS.Location = Service.defineService('location', ['2020-11-19']); +Object.defineProperty(apiLoader.services['location'], '2020-11-19', { + get: function get() { + var model = __nccwpck_require__(79257); + model.paginators = (__nccwpck_require__(53350)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Location; + + +/***/ }), + +/***/ 21843: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['lookoutequipment'] = {}; +AWS.LookoutEquipment = Service.defineService('lookoutequipment', ['2020-12-15']); +Object.defineProperty(apiLoader.services['lookoutequipment'], '2020-12-15', { + get: function get() { + var model = __nccwpck_require__(50969); + model.paginators = (__nccwpck_require__(92858)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LookoutEquipment; + + +/***/ }), + +/***/ 78708: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['lookoutmetrics'] = {}; +AWS.LookoutMetrics = Service.defineService('lookoutmetrics', ['2017-07-25']); +Object.defineProperty(apiLoader.services['lookoutmetrics'], '2017-07-25', { + get: function get() { + var model = __nccwpck_require__(37749); + model.paginators = (__nccwpck_require__(13366)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LookoutMetrics; + + +/***/ }), + +/***/ 65046: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['lookoutvision'] = {}; +AWS.LookoutVision = Service.defineService('lookoutvision', ['2020-11-20']); +Object.defineProperty(apiLoader.services['lookoutvision'], '2020-11-20', { + get: function get() { + var model = 
__nccwpck_require__(15110); + model.paginators = (__nccwpck_require__(45644)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.LookoutVision; + + +/***/ }), + +/***/ 22482: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['m2'] = {}; +AWS.M2 = Service.defineService('m2', ['2021-04-28']); +Object.defineProperty(apiLoader.services['m2'], '2021-04-28', { + get: function get() { + var model = __nccwpck_require__(21363); + model.paginators = (__nccwpck_require__(96286)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.M2; + + +/***/ }), + +/***/ 82907: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['machinelearning'] = {}; +AWS.MachineLearning = Service.defineService('machinelearning', ['2014-12-12']); +__nccwpck_require__(19174); +Object.defineProperty(apiLoader.services['machinelearning'], '2014-12-12', { + get: function get() { + var model = __nccwpck_require__(4069); + model.paginators = (__nccwpck_require__(95535)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(23194)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MachineLearning; + + +/***/ }), + +/***/ 57330: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['macie2'] = {}; +AWS.Macie2 = Service.defineService('macie2', ['2020-01-01']); +Object.defineProperty(apiLoader.services['macie2'], '2020-01-01', { + get: function get() { + var model = __nccwpck_require__(50847); + model.paginators = (__nccwpck_require__(25947)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(71131)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Macie2; + + +/***/ }), + +/***/ 85143: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['managedblockchain'] = {}; +AWS.ManagedBlockchain = Service.defineService('managedblockchain', ['2018-09-24']); +Object.defineProperty(apiLoader.services['managedblockchain'], '2018-09-24', { + get: function get() { + var model = __nccwpck_require__(31229); + model.paginators = (__nccwpck_require__(57358)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ManagedBlockchain; + + +/***/ }), + +/***/ 51046: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['managedblockchainquery'] = {}; +AWS.ManagedBlockchainQuery = Service.defineService('managedblockchainquery', ['2023-05-04']); +Object.defineProperty(apiLoader.services['managedblockchainquery'], '2023-05-04', { + get: function get() { + var model = __nccwpck_require__(53546); + model.paginators = 
(__nccwpck_require__(95929)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(17688)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.ManagedBlockchainQuery; + + +/***/ }), + +/***/ 50379: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['marketplaceagreement'] = {}; +AWS.MarketplaceAgreement = Service.defineService('marketplaceagreement', ['2020-03-01']); +Object.defineProperty(apiLoader.services['marketplaceagreement'], '2020-03-01', { + get: function get() { + var model = __nccwpck_require__(35188); + model.paginators = (__nccwpck_require__(99220)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MarketplaceAgreement; + + +/***/ }), + +/***/ 2609: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['marketplacecatalog'] = {}; +AWS.MarketplaceCatalog = Service.defineService('marketplacecatalog', ['2018-09-17']); +Object.defineProperty(apiLoader.services['marketplacecatalog'], '2018-09-17', { + get: function get() { + var model = __nccwpck_require__(87122); + model.paginators = (__nccwpck_require__(30187)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MarketplaceCatalog; + + +/***/ }), + +/***/ 4540: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['marketplacecommerceanalytics'] = {}; +AWS.MarketplaceCommerceAnalytics = Service.defineService('marketplacecommerceanalytics', ['2015-07-01']); +Object.defineProperty(apiLoader.services['marketplacecommerceanalytics'], '2015-07-01', { + get: function get() { + var model = __nccwpck_require__(96696); + model.paginators = (__nccwpck_require__(43265)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MarketplaceCommerceAnalytics; + + +/***/ }), + +/***/ 56811: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['marketplacedeployment'] = {}; +AWS.MarketplaceDeployment = Service.defineService('marketplacedeployment', ['2023-01-25']); +Object.defineProperty(apiLoader.services['marketplacedeployment'], '2023-01-25', { + get: function get() { + var model = __nccwpck_require__(9966); + model.paginators = (__nccwpck_require__(31372)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MarketplaceDeployment; + + +/***/ }), + +/***/ 53707: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['marketplaceentitlementservice'] = {}; +AWS.MarketplaceEntitlementService = Service.defineService('marketplaceentitlementservice', ['2017-01-11']); 
+Object.defineProperty(apiLoader.services['marketplaceentitlementservice'], '2017-01-11', { + get: function get() { + var model = __nccwpck_require__(64253); + model.paginators = (__nccwpck_require__(67012)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MarketplaceEntitlementService; + + +/***/ }), + +/***/ 39297: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['marketplacemetering'] = {}; +AWS.MarketplaceMetering = Service.defineService('marketplacemetering', ['2016-01-14']); +Object.defineProperty(apiLoader.services['marketplacemetering'], '2016-01-14', { + get: function get() { + var model = __nccwpck_require__(43027); + model.paginators = (__nccwpck_require__(4843)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MarketplaceMetering; + + +/***/ }), + +/***/ 67639: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mediaconnect'] = {}; +AWS.MediaConnect = Service.defineService('mediaconnect', ['2018-11-14']); +Object.defineProperty(apiLoader.services['mediaconnect'], '2018-11-14', { + get: function get() { + var model = __nccwpck_require__(85245); + model.paginators = (__nccwpck_require__(68160)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(42876)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MediaConnect; + + +/***/ }), + +/***/ 57220: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mediaconvert'] = {}; +AWS.MediaConvert = Service.defineService('mediaconvert', ['2017-08-29']); +Object.defineProperty(apiLoader.services['mediaconvert'], '2017-08-29', { + get: function get() { + var model = __nccwpck_require__(41924); + model.paginators = (__nccwpck_require__(14179)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MediaConvert; + + +/***/ }), + +/***/ 7509: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['medialive'] = {}; +AWS.MediaLive = Service.defineService('medialive', ['2017-10-14']); +Object.defineProperty(apiLoader.services['medialive'], '2017-10-14', { + get: function get() { + var model = __nccwpck_require__(32326); + model.paginators = (__nccwpck_require__(84652)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(17259)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MediaLive; + + +/***/ }), + +/***/ 91620: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mediapackage'] = {}; +AWS.MediaPackage = Service.defineService('mediapackage', ['2017-10-12']); 
+Object.defineProperty(apiLoader.services['mediapackage'], '2017-10-12', { + get: function get() { + var model = __nccwpck_require__(51261); + model.paginators = (__nccwpck_require__(48933)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MediaPackage; + + +/***/ }), + +/***/ 53264: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mediapackagev2'] = {}; +AWS.MediaPackageV2 = Service.defineService('mediapackagev2', ['2022-12-25']); +Object.defineProperty(apiLoader.services['mediapackagev2'], '2022-12-25', { + get: function get() { + var model = __nccwpck_require__(37594); + model.paginators = (__nccwpck_require__(44503)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(68906)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MediaPackageV2; + + +/***/ }), + +/***/ 14962: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mediapackagevod'] = {}; +AWS.MediaPackageVod = Service.defineService('mediapackagevod', ['2018-11-07']); +Object.defineProperty(apiLoader.services['mediapackagevod'], '2018-11-07', { + get: function get() { + var model = __nccwpck_require__(98877); + model.paginators = (__nccwpck_require__(48422)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MediaPackageVod; + + +/***/ }), + +/***/ 83748: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mediastore'] = {}; +AWS.MediaStore = Service.defineService('mediastore', ['2017-09-01']); +Object.defineProperty(apiLoader.services['mediastore'], '2017-09-01', { + get: function get() { + var model = __nccwpck_require__(68901); + model.paginators = (__nccwpck_require__(5848)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MediaStore; + + +/***/ }), + +/***/ 98703: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mediastoredata'] = {}; +AWS.MediaStoreData = Service.defineService('mediastoredata', ['2017-09-01']); +Object.defineProperty(apiLoader.services['mediastoredata'], '2017-09-01', { + get: function get() { + var model = __nccwpck_require__(55081); + model.paginators = (__nccwpck_require__(97948)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MediaStoreData; + + +/***/ }), + +/***/ 99658: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mediatailor'] = {}; +AWS.MediaTailor = Service.defineService('mediatailor', ['2018-04-23']); +Object.defineProperty(apiLoader.services['mediatailor'], '2018-04-23', { + get: function get() { + var model = 
__nccwpck_require__(77511); + model.paginators = (__nccwpck_require__(68557)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MediaTailor; + + +/***/ }), + +/***/ 79712: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['medicalimaging'] = {}; +AWS.MedicalImaging = Service.defineService('medicalimaging', ['2023-07-19']); +Object.defineProperty(apiLoader.services['medicalimaging'], '2023-07-19', { + get: function get() { + var model = __nccwpck_require__(46663); + model.paginators = (__nccwpck_require__(63177)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(63171)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MedicalImaging; + + +/***/ }), + +/***/ 50782: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['memorydb'] = {}; +AWS.MemoryDB = Service.defineService('memorydb', ['2021-01-01']); +Object.defineProperty(apiLoader.services['memorydb'], '2021-01-01', { + get: function get() { + var model = __nccwpck_require__(51950); + model.paginators = (__nccwpck_require__(93809)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MemoryDB; + + +/***/ }), + +/***/ 41339: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mgn'] = {}; +AWS.Mgn = Service.defineService('mgn', ['2020-02-26']); +Object.defineProperty(apiLoader.services['mgn'], '2020-02-26', { + get: function get() { + var model = __nccwpck_require__(65811); + model.paginators = (__nccwpck_require__(52443)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Mgn; + + +/***/ }), + +/***/ 14688: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['migrationhub'] = {}; +AWS.MigrationHub = Service.defineService('migrationhub', ['2017-05-31']); +Object.defineProperty(apiLoader.services['migrationhub'], '2017-05-31', { + get: function get() { + var model = __nccwpck_require__(99161); + model.paginators = (__nccwpck_require__(27903)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MigrationHub; + + +/***/ }), + +/***/ 62658: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['migrationhubconfig'] = {}; +AWS.MigrationHubConfig = Service.defineService('migrationhubconfig', ['2019-06-30']); +Object.defineProperty(apiLoader.services['migrationhubconfig'], '2019-06-30', { + get: function get() { + var model = __nccwpck_require__(59734); + model.paginators = (__nccwpck_require__(51497)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = 
AWS.MigrationHubConfig; + + +/***/ }), + +/***/ 66120: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['migrationhuborchestrator'] = {}; +AWS.MigrationHubOrchestrator = Service.defineService('migrationhuborchestrator', ['2021-08-28']); +Object.defineProperty(apiLoader.services['migrationhuborchestrator'], '2021-08-28', { + get: function get() { + var model = __nccwpck_require__(73093); + model.paginators = (__nccwpck_require__(24233)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(83173)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MigrationHubOrchestrator; + + +/***/ }), + +/***/ 2925: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['migrationhubrefactorspaces'] = {}; +AWS.MigrationHubRefactorSpaces = Service.defineService('migrationhubrefactorspaces', ['2021-10-26']); +Object.defineProperty(apiLoader.services['migrationhubrefactorspaces'], '2021-10-26', { + get: function get() { + var model = __nccwpck_require__(17110); + model.paginators = (__nccwpck_require__(63789)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MigrationHubRefactorSpaces; + + +/***/ }), + +/***/ 96533: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['migrationhubstrategy'] = {}; +AWS.MigrationHubStrategy = Service.defineService('migrationhubstrategy', ['2020-02-19']); +Object.defineProperty(apiLoader.services['migrationhubstrategy'], '2020-02-19', { + get: function get() { + var model = __nccwpck_require__(64663); + model.paginators = (__nccwpck_require__(30896)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MigrationHubStrategy; + + +/***/ }), + +/***/ 39782: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mobile'] = {}; +AWS.Mobile = Service.defineService('mobile', ['2017-07-01']); +Object.defineProperty(apiLoader.services['mobile'], '2017-07-01', { + get: function get() { + var model = __nccwpck_require__(51691); + model.paginators = (__nccwpck_require__(43522)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Mobile; + + +/***/ }), + +/***/ 66690: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['mobileanalytics'] = {}; +AWS.MobileAnalytics = Service.defineService('mobileanalytics', ['2014-06-05']); +Object.defineProperty(apiLoader.services['mobileanalytics'], '2014-06-05', { + get: function get() { + var model = __nccwpck_require__(90338); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.MobileAnalytics; + + +/***/ }), + +/***/ 23093: +/***/ ((module, 
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['mq'] = {};
+AWS.MQ = Service.defineService('mq', ['2017-11-27']);
+Object.defineProperty(apiLoader.services['mq'], '2017-11-27', {
+  get: function get() {
+    var model = __nccwpck_require__(35102);
+    model.paginators = (__nccwpck_require__(46095)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.MQ;
+
+
+/***/ }),
+
+/***/ 79954:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['mturk'] = {};
+AWS.MTurk = Service.defineService('mturk', ['2017-01-17']);
+Object.defineProperty(apiLoader.services['mturk'], '2017-01-17', {
+  get: function get() {
+    var model = __nccwpck_require__(73064);
+    model.paginators = (__nccwpck_require__(42409)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.MTurk;
+
+
+/***/ }),
+
+/***/ 32712:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['mwaa'] = {};
+AWS.MWAA = Service.defineService('mwaa', ['2020-07-01']);
+Object.defineProperty(apiLoader.services['mwaa'], '2020-07-01', {
+  get: function get() {
+    var model = __nccwpck_require__(56612);
+    model.paginators = (__nccwpck_require__(11793)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.MWAA;
+
+
+/***/ }),
+
+/***/ 30047:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['neptune'] = {};
+AWS.Neptune = Service.defineService('neptune', ['2014-10-31']);
+__nccwpck_require__(73090);
+Object.defineProperty(apiLoader.services['neptune'], '2014-10-31', {
+  get: function get() {
+    var model = __nccwpck_require__(50018);
+    model.paginators = (__nccwpck_require__(62952)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(8127)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Neptune;
+
+
+/***/ }),
+
+/***/ 25737:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['neptunedata'] = {};
+AWS.Neptunedata = Service.defineService('neptunedata', ['2023-08-01']);
+Object.defineProperty(apiLoader.services['neptunedata'], '2023-08-01', {
+  get: function get() {
+    var model = __nccwpck_require__(31008);
+    model.paginators = (__nccwpck_require__(2363)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Neptunedata;
+
+
+/***/ }),
+
+/***/ 77598:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['neptunegraph'] = {};
+AWS.NeptuneGraph = Service.defineService('neptunegraph', ['2023-11-29']);
+__nccwpck_require__(71963);
+Object.defineProperty(apiLoader.services['neptunegraph'], '2023-11-29', {
+  get: function get() {
+    var model = __nccwpck_require__(19121);
+    model.paginators = (__nccwpck_require__(85871)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(91832)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.NeptuneGraph;
+
+
+/***/ }),
+
+/***/ 84626:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['networkfirewall'] = {};
+AWS.NetworkFirewall = Service.defineService('networkfirewall', ['2020-11-12']);
+Object.defineProperty(apiLoader.services['networkfirewall'], '2020-11-12', {
+  get: function get() {
+    var model = __nccwpck_require__(63757);
+    model.paginators = (__nccwpck_require__(74798)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.NetworkFirewall;
+
+
+/***/ }),
+
+/***/ 37610:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['networkmanager'] = {};
+AWS.NetworkManager = Service.defineService('networkmanager', ['2019-07-05']);
+Object.defineProperty(apiLoader.services['networkmanager'], '2019-07-05', {
+  get: function get() {
+    var model = __nccwpck_require__(10151);
+    model.paginators = (__nccwpck_require__(68278)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.NetworkManager;
+
+
+/***/ }),
+
+/***/ 77614:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['networkmonitor'] = {};
+AWS.NetworkMonitor = Service.defineService('networkmonitor', ['2023-08-01']);
+Object.defineProperty(apiLoader.services['networkmonitor'], '2023-08-01', {
+  get: function get() {
+    var model = __nccwpck_require__(37278);
+    model.paginators = (__nccwpck_require__(76488)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(61551)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.NetworkMonitor;
+
+
+/***/ }),
+
+/***/ 89428:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['nimble'] = {};
+AWS.Nimble = Service.defineService('nimble', ['2020-08-01']);
+Object.defineProperty(apiLoader.services['nimble'], '2020-08-01', {
+  get: function get() {
+    var model = __nccwpck_require__(50605);
+    model.paginators = (__nccwpck_require__(65300)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(42486)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Nimble;
+
+
+/***/ }),
+
+/***/ 9319:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['oam'] = {};
+AWS.OAM = Service.defineService('oam', ['2022-06-10']);
+Object.defineProperty(apiLoader.services['oam'], '2022-06-10', {
+  get: function get() {
+    var model = __nccwpck_require__(13463);
+    model.paginators = (__nccwpck_require__(55717)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.OAM;
+
+
+/***/ }),
+
+/***/ 75114:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['omics'] = {};
+AWS.Omics = Service.defineService('omics', ['2022-11-28']);
+Object.defineProperty(apiLoader.services['omics'], '2022-11-28', {
+  get: function get() {
+    var model = __nccwpck_require__(74258);
+    model.paginators = (__nccwpck_require__(78278)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(31165)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Omics;
+
+
+/***/ }),
+
+/***/ 60358:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['opensearch'] = {};
+AWS.OpenSearch = Service.defineService('opensearch', ['2021-01-01']);
+Object.defineProperty(apiLoader.services['opensearch'], '2021-01-01', {
+  get: function get() {
+    var model = __nccwpck_require__(90583);
+    model.paginators = (__nccwpck_require__(32668)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.OpenSearch;
+
+
+/***/ }),
+
+/***/ 86277:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['opensearchserverless'] = {};
+AWS.OpenSearchServerless = Service.defineService('opensearchserverless', ['2021-11-01']);
+Object.defineProperty(apiLoader.services['opensearchserverless'], '2021-11-01', {
+  get: function get() {
+    var model = __nccwpck_require__(61668);
+    model.paginators = (__nccwpck_require__(68785)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.OpenSearchServerless;
+
+
+/***/ }),
+
+/***/ 75691:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['opsworks'] = {};
+AWS.OpsWorks = Service.defineService('opsworks', ['2013-02-18']);
+Object.defineProperty(apiLoader.services['opsworks'], '2013-02-18', {
+  get: function get() {
+    var model = __nccwpck_require__(22805);
+    model.paginators = (__nccwpck_require__(24750)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(74961)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.OpsWorks;
+
+
+/***/ }),
+
+/***/ 80388:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['opsworkscm'] = {};
+AWS.OpsWorksCM = Service.defineService('opsworkscm', ['2016-11-01']);
+Object.defineProperty(apiLoader.services['opsworkscm'], '2016-11-01', {
+  get: function get() {
+    var model = __nccwpck_require__(56705);
+    model.paginators = (__nccwpck_require__(49463)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(65003)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.OpsWorksCM;
+
+
+/***/ }),
+
+/***/ 52560:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['organizations'] = {};
+AWS.Organizations = Service.defineService('organizations', ['2016-11-28']);
+Object.defineProperty(apiLoader.services['organizations'], '2016-11-28', {
+  get: function get() {
+    var model = __nccwpck_require__(58874);
+    model.paginators = (__nccwpck_require__(43261)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Organizations;
+
+
+/***/ }),
+
+/***/ 98021:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['osis'] = {};
+AWS.OSIS = Service.defineService('osis', ['2022-01-01']);
+Object.defineProperty(apiLoader.services['osis'], '2022-01-01', {
+  get: function get() {
+    var model = __nccwpck_require__(51838);
+    model.paginators = (__nccwpck_require__(72472)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.OSIS;
+
+
+/***/ }),
+
+/***/ 27551:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['outposts'] = {};
+AWS.Outposts = Service.defineService('outposts', ['2019-12-03']);
+Object.defineProperty(apiLoader.services['outposts'], '2019-12-03', {
+  get: function get() {
+    var model = __nccwpck_require__(4807);
+    model.paginators = (__nccwpck_require__(3364)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Outposts;
+
+
+/***/ }),
+
+/***/ 20368:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['panorama'] = {};
+AWS.Panorama = Service.defineService('panorama', ['2019-07-24']);
+Object.defineProperty(apiLoader.services['panorama'], '2019-07-24', {
+  get: function get() {
+    var model = __nccwpck_require__(91489);
+    model.paginators = (__nccwpck_require__(77238)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Panorama;
+
+
+/***/ }),
+
+/***/ 11594:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['paymentcryptography'] = {};
+AWS.PaymentCryptography = Service.defineService('paymentcryptography', ['2021-09-14']);
+Object.defineProperty(apiLoader.services['paymentcryptography'], '2021-09-14', {
+  get: function get() {
+    var model = __nccwpck_require__(86072);
+    model.paginators = (__nccwpck_require__(17819)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.PaymentCryptography;
+
+
+/***/ }),
+
+/***/ 96559:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['paymentcryptographydata'] = {};
+AWS.PaymentCryptographyData = Service.defineService('paymentcryptographydata', ['2022-02-03']);
+Object.defineProperty(apiLoader.services['paymentcryptographydata'], '2022-02-03', {
+  get: function get() {
+    var model = __nccwpck_require__(68578);
+    model.paginators = (__nccwpck_require__(89757)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.PaymentCryptographyData;
+
+
+/***/ }),
+
+/***/ 55959:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['pcaconnectorad'] = {};
+AWS.PcaConnectorAd = Service.defineService('pcaconnectorad', ['2018-05-10']);
+Object.defineProperty(apiLoader.services['pcaconnectorad'], '2018-05-10', {
+  get: function get() {
+    var model = __nccwpck_require__(6901);
+    model.paginators = (__nccwpck_require__(33158)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.PcaConnectorAd;
+
+
+/***/ }),
+
+/***/ 33696:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['personalize'] = {};
+AWS.Personalize = Service.defineService('personalize', ['2018-05-22']);
+Object.defineProperty(apiLoader.services['personalize'], '2018-05-22', {
+  get: function get() {
+    var model = __nccwpck_require__(70169);
+    model.paginators = (__nccwpck_require__(64441)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Personalize;
+
+
+/***/ }),
+
+/***/ 88170:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['personalizeevents'] = {};
+AWS.PersonalizeEvents = Service.defineService('personalizeevents', ['2018-03-22']);
+Object.defineProperty(apiLoader.services['personalizeevents'], '2018-03-22', {
+  get: function get() {
+    var model = __nccwpck_require__(3606);
+    model.paginators = (__nccwpck_require__(94507)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.PersonalizeEvents;
+
+
+/***/ }),
+
+/***/ 66184:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['personalizeruntime'] = {};
+AWS.PersonalizeRuntime = Service.defineService('personalizeruntime', ['2018-05-22']);
+Object.defineProperty(apiLoader.services['personalizeruntime'], '2018-05-22', {
+  get: function get() {
+    var model = __nccwpck_require__(18824);
+    model.paginators = (__nccwpck_require__(8069)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.PersonalizeRuntime;
+
+
+/***/ }),
+
+/***/ 15505:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['pi'] = {};
+AWS.PI = Service.defineService('pi', ['2018-02-27']);
+Object.defineProperty(apiLoader.services['pi'], '2018-02-27', {
+  get: function get() {
+    var model = __nccwpck_require__(18761);
+    model.paginators = (__nccwpck_require__(84882)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.PI;
+
+
+/***/ }),
+
+/***/ 18388:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['pinpoint'] = {};
+AWS.Pinpoint = Service.defineService('pinpoint', ['2016-12-01']);
+Object.defineProperty(apiLoader.services['pinpoint'], '2016-12-01', {
+  get: function get() {
+    var model = __nccwpck_require__(40605);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Pinpoint;
+
+
+/***/ }),
+
+/***/ 83060:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['pinpointemail'] = {};
+AWS.PinpointEmail = Service.defineService('pinpointemail', ['2018-07-26']);
+Object.defineProperty(apiLoader.services['pinpointemail'], '2018-07-26', {
+  get: function get() {
+    var model = __nccwpck_require__(55228);
+    model.paginators = (__nccwpck_require__(45172)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.PinpointEmail;
+
+
+/***/ }),
+
+/***/ 46605:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['pinpointsmsvoice'] = {};
+AWS.PinpointSMSVoice = Service.defineService('pinpointsmsvoice', ['2018-09-05']);
+Object.defineProperty(apiLoader.services['pinpointsmsvoice'], '2018-09-05', {
+  get: function get() {
+    var model = __nccwpck_require__(98689);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.PinpointSMSVoice;
+
+
+/***/ }),
+
+/***/ 478:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['pinpointsmsvoicev2'] = {};
+AWS.PinpointSMSVoiceV2 = Service.defineService('pinpointsmsvoicev2', ['2022-03-31']);
+Object.defineProperty(apiLoader.services['pinpointsmsvoicev2'], '2022-03-31', {
+  get: function get() {
+    var model = __nccwpck_require__(88319);
+    model.paginators = (__nccwpck_require__(80650)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(6663)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.PinpointSMSVoiceV2;
+
+
+/***/ }),
+
+/***/ 14220:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['pipes'] = {};
+AWS.Pipes = Service.defineService('pipes', ['2015-10-07']);
+Object.defineProperty(apiLoader.services['pipes'], '2015-10-07', {
+  get: function get() {
+    var model = __nccwpck_require__(40616);
+    model.paginators = (__nccwpck_require__(17710)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Pipes;
+
+
+/***/ }),
+
+/***/ 97332:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['polly'] = {};
+AWS.Polly = Service.defineService('polly', ['2016-06-10']);
+__nccwpck_require__(53199);
+Object.defineProperty(apiLoader.services['polly'], '2016-06-10', {
+  get: function get() {
+    var model = __nccwpck_require__(55078);
+    model.paginators = (__nccwpck_require__(77060)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Polly;
+
+
+/***/ }),
+
+/***/ 92765:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['pricing'] = {};
+AWS.Pricing = Service.defineService('pricing', ['2017-10-15']);
+Object.defineProperty(apiLoader.services['pricing'], '2017-10-15', {
+  get: function get() {
+    var model = __nccwpck_require__(22484);
+    model.paginators = (__nccwpck_require__(60369)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(41996)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Pricing;
+
+
+/***/ }),
+
+/***/ 63088:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['privatenetworks'] = {};
+AWS.PrivateNetworks = Service.defineService('privatenetworks', ['2021-12-03']);
+Object.defineProperty(apiLoader.services['privatenetworks'], '2021-12-03', {
+  get: function get() {
+    var model = __nccwpck_require__(46306);
+    model.paginators = (__nccwpck_require__(42771)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.PrivateNetworks;
+
+
+/***/ }),
+
+/***/ 9275:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['proton'] = {};
+AWS.Proton = Service.defineService('proton', ['2020-07-20']);
+Object.defineProperty(apiLoader.services['proton'], '2020-07-20', {
+  get: function get() {
+    var model = __nccwpck_require__(78577);
+    model.paginators = (__nccwpck_require__(14299)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(99338)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Proton;
+
+
+/***/ }),
+
+/***/ 26842:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['qbusiness'] = {};
+AWS.QBusiness = Service.defineService('qbusiness', ['2023-11-27']);
+Object.defineProperty(apiLoader.services['qbusiness'], '2023-11-27', {
+  get: function get() {
+    var model = __nccwpck_require__(12388);
+    model.paginators = (__nccwpck_require__(51051)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.QBusiness;
+
+
+/***/ }),
+
+/***/ 39094:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['qconnect'] = {};
+AWS.QConnect = Service.defineService('qconnect', ['2020-10-19']);
+Object.defineProperty(apiLoader.services['qconnect'], '2020-10-19', {
+  get: function get() {
+    var model = __nccwpck_require__(72266);
+    model.paginators = (__nccwpck_require__(95945)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.QConnect;
+
+
+/***/ }),
+
+/***/ 71266:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['qldb'] = {};
+AWS.QLDB = Service.defineService('qldb', ['2019-01-02']);
+Object.defineProperty(apiLoader.services['qldb'], '2019-01-02', {
+  get: function get() {
+    var model = __nccwpck_require__(71346);
+    model.paginators = (__nccwpck_require__(34265)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.QLDB;
+
+
+/***/ }),
+
+/***/ 55423:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['qldbsession'] = {};
+AWS.QLDBSession = Service.defineService('qldbsession', ['2019-07-11']);
+Object.defineProperty(apiLoader.services['qldbsession'], '2019-07-11', {
+  get: function get() {
+    var model = __nccwpck_require__(60040);
+    model.paginators = (__nccwpck_require__(61051)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.QLDBSession;
+
+
+/***/ }),
+
+/***/ 29898:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['quicksight'] = {};
+AWS.QuickSight = Service.defineService('quicksight', ['2018-04-01']);
+Object.defineProperty(apiLoader.services['quicksight'], '2018-04-01', {
+  get: function get() {
+    var model = __nccwpck_require__(8419);
+    model.paginators = (__nccwpck_require__(43387)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.QuickSight;
+
+
+/***/ }),
+
+/***/ 94394:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['ram'] = {};
+AWS.RAM = Service.defineService('ram', ['2018-01-04']);
+Object.defineProperty(apiLoader.services['ram'], '2018-01-04', {
+  get: function get() {
+    var model = __nccwpck_require__(61375);
+    model.paginators = (__nccwpck_require__(85336)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.RAM;
+
+
+/***/ }),
+
+/***/ 70145:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['rbin'] = {};
+AWS.Rbin = Service.defineService('rbin', ['2021-06-15']);
+Object.defineProperty(apiLoader.services['rbin'], '2021-06-15', {
+  get: function get() {
+    var model = __nccwpck_require__(18897);
+    model.paginators = (__nccwpck_require__(57601)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Rbin;
+
+
+/***/ }),
+
+/***/ 71578:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['rds'] = {};
+AWS.RDS = Service.defineService('rds', ['2013-01-10', '2013-02-12', '2013-09-09', '2014-09-01', '2014-09-01*', '2014-10-31']);
+__nccwpck_require__(71928);
+Object.defineProperty(apiLoader.services['rds'], '2013-01-10', {
+  get: function get() {
+    var model = __nccwpck_require__(59989);
+    model.paginators = (__nccwpck_require__(978)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+Object.defineProperty(apiLoader.services['rds'], '2013-02-12', {
+  get: function get() {
+    var model = __nccwpck_require__(55061);
+    model.paginators = (__nccwpck_require__(39581)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+Object.defineProperty(apiLoader.services['rds'], '2013-09-09', {
+  get: function get() {
+    var model = __nccwpck_require__(36331);
+    model.paginators = (__nccwpck_require__(14485)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(36851)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+Object.defineProperty(apiLoader.services['rds'], '2014-09-01', {
+  get: function get() {
+    var model = __nccwpck_require__(19226);
+    model.paginators = (__nccwpck_require__(49863)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+Object.defineProperty(apiLoader.services['rds'], '2014-10-31', {
+  get: function get() {
+    var model = __nccwpck_require__(91916);
+    model.paginators = (__nccwpck_require__(85082)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(20371)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.RDS;
+
+
+/***/ }),
+
+/***/ 30147:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['rdsdataservice'] = {};
+AWS.RDSDataService = Service.defineService('rdsdataservice', ['2018-08-01']);
+__nccwpck_require__(64070);
+Object.defineProperty(apiLoader.services['rdsdataservice'], '2018-08-01', {
+  get: function get() {
+    var model = __nccwpck_require__(13559);
+    model.paginators = (__nccwpck_require__(41160)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.RDSDataService;
+
+
+/***/ }),
+
+/***/ 84853:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['redshift'] = {};
+AWS.Redshift = Service.defineService('redshift', ['2012-12-01']);
+Object.defineProperty(apiLoader.services['redshift'], '2012-12-01', {
+  get: function get() {
+    var model = __nccwpck_require__(24827);
+    model.paginators = (__nccwpck_require__(88012)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(79011)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Redshift;
+
+
+/***/ }),
+
+/***/ 203:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['redshiftdata'] = {};
+AWS.RedshiftData = Service.defineService('redshiftdata', ['2019-12-20']);
+Object.defineProperty(apiLoader.services['redshiftdata'], '2019-12-20', {
+  get: function get() {
+    var model = __nccwpck_require__(85203);
+    model.paginators = (__nccwpck_require__(27797)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.RedshiftData;
+
+
+/***/ }),
+
+/***/ 29987:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['redshiftserverless'] = {};
+AWS.RedshiftServerless = Service.defineService('redshiftserverless', ['2021-04-21']);
+Object.defineProperty(apiLoader.services['redshiftserverless'], '2021-04-21', {
+  get: function get() {
+    var model = __nccwpck_require__(95705);
+    model.paginators = (__nccwpck_require__(892)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.RedshiftServerless;
+
+
+/***/ }),
+
+/***/ 65470:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['rekognition'] = {};
+AWS.Rekognition = Service.defineService('rekognition', ['2016-06-27']);
+Object.defineProperty(apiLoader.services['rekognition'], '2016-06-27', {
+  get: function get() {
+    var model = __nccwpck_require__(66442);
+    model.paginators = (__nccwpck_require__(37753)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(78910)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Rekognition;
+
+
+/***/ }),
+
+/***/ 21154:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['repostspace'] = {};
+AWS.Repostspace = Service.defineService('repostspace', ['2022-05-13']);
+Object.defineProperty(apiLoader.services['repostspace'], '2022-05-13', {
+  get: function get() {
+    var model = __nccwpck_require__(59766);
+    model.paginators = (__nccwpck_require__(10997)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Repostspace;
+
+
+/***/ }),
+
+/***/ 21173:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['resiliencehub'] = {};
+AWS.Resiliencehub = Service.defineService('resiliencehub', ['2020-04-30']);
+Object.defineProperty(apiLoader.services['resiliencehub'], '2020-04-30', {
+  get: function get() {
+    var model = __nccwpck_require__(3885);
+    model.paginators = (__nccwpck_require__(38750)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Resiliencehub;
+
+
+/***/ }),
+
+/***/ 74071:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['resourceexplorer2'] = {};
+AWS.ResourceExplorer2 = Service.defineService('resourceexplorer2', ['2022-07-28']);
+Object.defineProperty(apiLoader.services['resourceexplorer2'], '2022-07-28', {
+  get: function get() {
+    var model = __nccwpck_require__(26515);
+    model.paginators = (__nccwpck_require__(8580)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.ResourceExplorer2;
+
+
+/***/ }),
+
+/***/ 58756:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['resourcegroups'] = {};
+AWS.ResourceGroups = Service.defineService('resourcegroups', ['2017-11-27']);
+Object.defineProperty(apiLoader.services['resourcegroups'], '2017-11-27', {
+  get: function get() {
+    var model = __nccwpck_require__(73621);
+    model.paginators = (__nccwpck_require__(24085)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.ResourceGroups;
+
+
+/***/ }),
+
+/***/ 7385:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['resourcegroupstaggingapi'] = {};
+AWS.ResourceGroupsTaggingAPI = Service.defineService('resourcegroupstaggingapi', ['2017-01-26']);
+Object.defineProperty(apiLoader.services['resourcegroupstaggingapi'], '2017-01-26', {
+  get: function get() {
+    var model = __nccwpck_require__(71720);
+    model.paginators = (__nccwpck_require__(36635)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.ResourceGroupsTaggingAPI;
+
+
+/***/ }),
+
+/***/ 18068:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['robomaker'] = {};
+AWS.RoboMaker = Service.defineService('robomaker', ['2018-06-29']);
+Object.defineProperty(apiLoader.services['robomaker'], '2018-06-29', {
+  get: function get() {
+    var model = __nccwpck_require__(6904);
+    model.paginators = (__nccwpck_require__(43495)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.RoboMaker;
+
+
+/***/ }),
+
+/***/ 83604:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['rolesanywhere'] = {};
+AWS.RolesAnywhere = Service.defineService('rolesanywhere', ['2018-05-10']);
+Object.defineProperty(apiLoader.services['rolesanywhere'], '2018-05-10', {
+  get: function get() {
+    var model = __nccwpck_require__(80801);
+    model.paginators = (__nccwpck_require__(65955)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.RolesAnywhere;
+
+
+/***/ }),
+
+/***/ 44968:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['route53'] = {};
+AWS.Route53 = Service.defineService('route53', ['2013-04-01']);
+__nccwpck_require__(69627);
+Object.defineProperty(apiLoader.services['route53'], '2013-04-01', {
+  get: function get() {
+    var model = __nccwpck_require__(20959);
+    model.paginators = (__nccwpck_require__(46456)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(28347)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Route53;
+
+
+/***/ }),
+
+/***/ 51994:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['route53domains'] = {};
+AWS.Route53Domains = Service.defineService('route53domains', ['2014-05-15']);
+Object.defineProperty(apiLoader.services['route53domains'], '2014-05-15', {
+  get: function get() {
+    var model = __nccwpck_require__(57598);
+    model.paginators = (__nccwpck_require__(52189)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Route53Domains;
+
+
+/***/ }),
+
+/***/ 35738:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['route53recoverycluster'] = {};
+AWS.Route53RecoveryCluster = Service.defineService('route53recoverycluster', ['2019-12-02']);
+Object.defineProperty(apiLoader.services['route53recoverycluster'], '2019-12-02', {
+  get: function get() {
+    var model = __nccwpck_require__(73989);
+    model.paginators = (__nccwpck_require__(69118)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Route53RecoveryCluster;
+
+
+/***/ }),
+
+/***/ 16063:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['route53recoverycontrolconfig'] = {};
+AWS.Route53RecoveryControlConfig = Service.defineService('route53recoverycontrolconfig', ['2020-11-02']);
+Object.defineProperty(apiLoader.services['route53recoverycontrolconfig'], '2020-11-02', {
+  get: function get() {
+    var model = __nccwpck_require__(38334);
+    model.paginators = (__nccwpck_require__(19728)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(57184)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Route53RecoveryControlConfig;
+
+
+/***/ }),
+
+/***/ 79106:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['route53recoveryreadiness'] = {};
+AWS.Route53RecoveryReadiness = Service.defineService('route53recoveryreadiness', ['2019-12-02']);
+Object.defineProperty(apiLoader.services['route53recoveryreadiness'], '2019-12-02', {
+  get: function get() {
+    var model = __nccwpck_require__(40156);
+    model.paginators = (__nccwpck_require__(96969)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Route53RecoveryReadiness;
+
+
+/***/ }),
+
+/***/ 25894:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['route53resolver'] = {};
+AWS.Route53Resolver = Service.defineService('route53resolver', ['2018-04-01']);
+Object.defineProperty(apiLoader.services['route53resolver'], '2018-04-01', {
+  get: function get() {
+    var model = __nccwpck_require__(89229);
+    model.paginators = (__nccwpck_require__(95050)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Route53Resolver;
+
+
+/***/ }),
+
+/***/ 53237:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['rum'] = {};
+AWS.RUM = Service.defineService('rum', ['2018-05-10']);
+Object.defineProperty(apiLoader.services['rum'], '2018-05-10', {
+  get: function get() {
+    var model = __nccwpck_require__(84126);
+    model.paginators = (__nccwpck_require__(79432)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.RUM;
+
+
+/***/ }),
+
+/***/ 83256:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['s3'] = {};
+AWS.S3 = Service.defineService('s3', ['2006-03-01']);
+__nccwpck_require__(26543);
+Object.defineProperty(apiLoader.services['s3'], '2006-03-01', {
+  get: function get() {
+    var model = __nccwpck_require__(1129);
+    model.paginators = (__nccwpck_require__(7265)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(74048)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.S3;
+
+
+/***/ }),
+
+/***/ 99817:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['s3control'] = {};
+AWS.S3Control = Service.defineService('s3control', ['2018-08-20']);
+__nccwpck_require__(71207);
+Object.defineProperty(apiLoader.services['s3control'], '2018-08-20', {
+  get: function get() {
+    var model = __nccwpck_require__(1201);
+    model.paginators = (__nccwpck_require__(55527)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.S3Control;
+
+
+/***/ }),
+
+/***/ 90493:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['s3outposts'] = {};
+AWS.S3Outposts = Service.defineService('s3outposts', ['2017-07-25']);
+Object.defineProperty(apiLoader.services['s3outposts'], '2017-07-25', {
+  get: function get() {
+    var model = __nccwpck_require__(79971);
+    model.paginators = (__nccwpck_require__(32505)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.S3Outposts;
+
+
+/***/ }),
+
+/***/ 77657:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['sagemaker'] = {};
+AWS.SageMaker = Service.defineService('sagemaker', ['2017-07-24']);
+Object.defineProperty(apiLoader.services['sagemaker'], '2017-07-24', {
+  get: function get() {
+    var model = __nccwpck_require__(71132);
+    model.paginators = (__nccwpck_require__(69254)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(80824)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SageMaker;
+
+
+/***/ }),
+
+/***/ 38966:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['sagemakeredge'] = {};
+AWS.SagemakerEdge = Service.defineService('sagemakeredge', ['2020-09-23']);
+Object.defineProperty(apiLoader.services['sagemakeredge'], '2020-09-23', {
+  get: function get() {
+    var model = __nccwpck_require__(97093);
+    model.paginators = (__nccwpck_require__(71636)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SagemakerEdge;
+
+
+/***/ }),
+
+/***/ 67644:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['sagemakerfeaturestoreruntime'] = {};
+AWS.SageMakerFeatureStoreRuntime = Service.defineService('sagemakerfeaturestoreruntime', ['2020-07-01']);
+Object.defineProperty(apiLoader.services['sagemakerfeaturestoreruntime'], '2020-07-01', {
+  get: function get() {
+    var model = __nccwpck_require__(75546);
+    model.paginators = (__nccwpck_require__(12151)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SageMakerFeatureStoreRuntime;
+
+
+/***/ }),
+
+/***/ 4707:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['sagemakergeospatial'] = {};
+AWS.SageMakerGeospatial = Service.defineService('sagemakergeospatial', ['2020-05-27']);
+Object.defineProperty(apiLoader.services['sagemakergeospatial'], '2020-05-27', {
+  get: function get() {
+    var model = __nccwpck_require__(26059);
+    model.paginators = (__nccwpck_require__(99606)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SageMakerGeospatial;
+
+
+/***/ }),
+
+/***/ 28199:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['sagemakermetrics'] = {};
+AWS.SageMakerMetrics = Service.defineService('sagemakermetrics', ['2022-09-30']);
+Object.defineProperty(apiLoader.services['sagemakermetrics'], '2022-09-30', {
+  get: function get() {
+    var model = __nccwpck_require__(89834);
+    model.paginators = (__nccwpck_require__(80107)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SageMakerMetrics;
+
+
+/***/ }),
+
+/***/ 85044:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['sagemakerruntime'] = {};
+AWS.SageMakerRuntime = Service.defineService('sagemakerruntime', ['2017-05-13']);
+Object.defineProperty(apiLoader.services['sagemakerruntime'], '2017-05-13', {
+  get: function get() {
+    var model = __nccwpck_require__(27032);
+    model.paginators = (__nccwpck_require__(7570)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SageMakerRuntime;
+
+
+/***/ }),
+
+/***/ 62825:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['savingsplans'] = {};
+AWS.SavingsPlans = Service.defineService('savingsplans', ['2019-06-28']);
+Object.defineProperty(apiLoader.services['savingsplans'], '2019-06-28', {
+  get: function get() {
+    var model = __nccwpck_require__(46879);
+    model.paginators = (__nccwpck_require__(78998)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SavingsPlans;
+
+
+/***/ }),
+
+/***/ 94840:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['scheduler'] = {};
+AWS.Scheduler = Service.defineService('scheduler', ['2021-06-30']);
+Object.defineProperty(apiLoader.services['scheduler'], '2021-06-30', {
+  get: function get() {
+    var model = __nccwpck_require__(36876);
+    model.paginators = (__nccwpck_require__(54594)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Scheduler;
+
+
+/***/ }),
+
+/***/ 55713:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['schemas'] = {};
+AWS.Schemas = Service.defineService('schemas', ['2019-12-02']);
+Object.defineProperty(apiLoader.services['schemas'], '2019-12-02', {
+  get: function get() {
+    var model = __nccwpck_require__(76626);
+    model.paginators = (__nccwpck_require__(34227)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(62213)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Schemas;
+
+
+/***/ }),
+
+/***/ 85131:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['secretsmanager'] = {};
+AWS.SecretsManager = Service.defineService('secretsmanager', ['2017-10-17']);
+Object.defineProperty(apiLoader.services['secretsmanager'], '2017-10-17', {
+  get: function get() {
+    var model = __nccwpck_require__(89470);
+    model.paginators = (__nccwpck_require__(25613)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SecretsManager;
+
+
+/***/ }),
+
+/***/ 21550:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['securityhub'] = {};
+AWS.SecurityHub = Service.defineService('securityhub', ['2018-10-26']);
+Object.defineProperty(apiLoader.services['securityhub'], '2018-10-26', {
+  get: function get() {
+    var model = __nccwpck_require__(29208);
+    model.paginators = (__nccwpck_require__(85595)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SecurityHub;
+
+
+/***/ }),
+
+/***/ 84296:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['securitylake'] = {};
+AWS.SecurityLake = Service.defineService('securitylake', ['2018-05-10']);
+Object.defineProperty(apiLoader.services['securitylake'], '2018-05-10', {
+  get: function get() {
+    var model = __nccwpck_require__(26935);
+    model.paginators = (__nccwpck_require__(42170)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SecurityLake;
+
+
+/***/ }),
+
+/***/ 62402:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['serverlessapplicationrepository'] = {};
+AWS.ServerlessApplicationRepository = Service.defineService('serverlessapplicationrepository', ['2017-09-08']);
+Object.defineProperty(apiLoader.services['serverlessapplicationrepository'], '2017-09-08', {
+  get: function get() {
+    var model = __nccwpck_require__(68422);
+    model.paginators = (__nccwpck_require__(34864)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.ServerlessApplicationRepository;
+
+
+/***/ }),
+
+/***/ 822:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['servicecatalog'] = {};
+AWS.ServiceCatalog = Service.defineService('servicecatalog', ['2015-12-10']);
+Object.defineProperty(apiLoader.services['servicecatalog'], '2015-12-10', {
+  get: function get() {
+    var model = __nccwpck_require__(95500);
+    model.paginators = (__nccwpck_require__(21687)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.ServiceCatalog;
+
+
+/***/ }),
+
+/***/ 79068:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['servicecatalogappregistry'] = {};
+AWS.ServiceCatalogAppRegistry = Service.defineService('servicecatalogappregistry', ['2020-06-24']);
+Object.defineProperty(apiLoader.services['servicecatalogappregistry'], '2020-06-24', {
+  get: function get() {
+    var model = __nccwpck_require__(25697);
+    model.paginators = (__nccwpck_require__(28893)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.ServiceCatalogAppRegistry;
+
+
+/***/ }),
+
+/***/ 91569:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['servicediscovery'] = {};
+AWS.ServiceDiscovery = Service.defineService('servicediscovery', ['2017-03-14']);
+Object.defineProperty(apiLoader.services['servicediscovery'], '2017-03-14', {
+  get: function get() {
+    var model = __nccwpck_require__(22361);
+    model.paginators = (__nccwpck_require__(37798)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.ServiceDiscovery;
+
+
+/***/ }),
+
+/***/ 57800:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['servicequotas'] = {};
+AWS.ServiceQuotas = Service.defineService('servicequotas', ['2019-06-24']);
+Object.defineProperty(apiLoader.services['servicequotas'], '2019-06-24', {
+  get: function get() {
+    var model = __nccwpck_require__(68850);
+    model.paginators = (__nccwpck_require__(63074)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.ServiceQuotas;
+
+
+/***/ }),
+
+/***/ 46816:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['ses'] = {};
+AWS.SES = Service.defineService('ses', ['2010-12-01']);
+Object.defineProperty(apiLoader.services['ses'], '2010-12-01', {
+  get: function get() {
+    var model = __nccwpck_require__(56693);
+    model.paginators = (__nccwpck_require__(9399)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(98229)/* .waiters */ .V);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SES;
+
+
+/***/ }),
+
+/***/ 20142:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['sesv2'] = {};
+AWS.SESV2 = Service.defineService('sesv2', ['2019-09-27']);
+Object.defineProperty(apiLoader.services['sesv2'], '2019-09-27', {
+  get: function get() {
+    var model = __nccwpck_require__(69754);
+    model.paginators = (__nccwpck_require__(72405)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.SESV2;
+
+
+/***/ }),
+
+/***/ 20271:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['shield'] = {};
+AWS.Shield = Service.defineService('shield', ['2016-06-02']);
+Object.defineProperty(apiLoader.services['shield'], '2016-06-02', {
+  get: function get() {
+    var model = __nccwpck_require__(47061);
+    model.paginators = (__nccwpck_require__(54893)/* .pagination */ .o);
+    return model;
+  },
+  enumerable: true,
+  configurable: true
+});
+
+module.exports = AWS.Shield;
+
+
+/***/ }),
+
+/***/ 71596:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+__nccwpck_require__(73639);
+var AWS = __nccwpck_require__(28437);
+var Service = AWS.Service;
+var apiLoader = AWS.apiLoader;
+
+apiLoader.services['signer'] = {};
+AWS.Signer = Service.defineService('signer', ['2017-08-25']);
+Object.defineProperty(apiLoader.services['signer'], '2017-08-25', {
+  get: function get() {
+    var model = __nccwpck_require__(97116);
+    model.paginators = (__nccwpck_require__(81027)/* .pagination */ .o);
+    model.waiters = (__nccwpck_require__(48215)/* .waiters */ .V);
return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Signer; + + +/***/ }), + +/***/ 10120: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['simpledb'] = {}; +AWS.SimpleDB = Service.defineService('simpledb', ['2009-04-15']); +Object.defineProperty(apiLoader.services['simpledb'], '2009-04-15', { + get: function get() { + var model = __nccwpck_require__(45164); + model.paginators = (__nccwpck_require__(55255)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SimpleDB; + + +/***/ }), + +/***/ 37090: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['simspaceweaver'] = {}; +AWS.SimSpaceWeaver = Service.defineService('simspaceweaver', ['2022-10-28']); +Object.defineProperty(apiLoader.services['simspaceweaver'], '2022-10-28', { + get: function get() { + var model = __nccwpck_require__(92139); + model.paginators = (__nccwpck_require__(31849)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SimSpaceWeaver; + + +/***/ }), + +/***/ 57719: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['sms'] = {}; +AWS.SMS = Service.defineService('sms', ['2016-10-24']); +Object.defineProperty(apiLoader.services['sms'], '2016-10-24', { + get: function get() { + var model = __nccwpck_require__(26534); + model.paginators = (__nccwpck_require__(98730)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SMS; + + +/***/ }), + +/***/ 510: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['snowball'] = {}; +AWS.Snowball = Service.defineService('snowball', ['2016-06-30']); +Object.defineProperty(apiLoader.services['snowball'], '2016-06-30', { + get: function get() { + var model = __nccwpck_require__(96822); + model.paginators = (__nccwpck_require__(45219)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Snowball; + + +/***/ }), + +/***/ 64655: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['snowdevicemanagement'] = {}; +AWS.SnowDeviceManagement = Service.defineService('snowdevicemanagement', ['2021-08-04']); +Object.defineProperty(apiLoader.services['snowdevicemanagement'], '2021-08-04', { + get: function get() { + var model = __nccwpck_require__(97413); + model.paginators = (__nccwpck_require__(70424)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SnowDeviceManagement; + + +/***/ }), + +/***/ 28581: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = 
__nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['sns'] = {}; +AWS.SNS = Service.defineService('sns', ['2010-03-31']); +Object.defineProperty(apiLoader.services['sns'], '2010-03-31', { + get: function get() { + var model = __nccwpck_require__(64387); + model.paginators = (__nccwpck_require__(58054)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SNS; + + +/***/ }), + +/***/ 63172: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['sqs'] = {}; +AWS.SQS = Service.defineService('sqs', ['2012-11-05']); +__nccwpck_require__(94571); +Object.defineProperty(apiLoader.services['sqs'], '2012-11-05', { + get: function get() { + var model = __nccwpck_require__(53974); + model.paginators = (__nccwpck_require__(17249)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SQS; + + +/***/ }), + +/***/ 83380: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ssm'] = {}; +AWS.SSM = Service.defineService('ssm', ['2014-11-06']); +Object.defineProperty(apiLoader.services['ssm'], '2014-11-06', { + get: function get() { + var model = __nccwpck_require__(44596); + model.paginators = (__nccwpck_require__(5135)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(98523)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SSM; + + +/***/ }), + +/***/ 12577: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ssmcontacts'] = {}; +AWS.SSMContacts = Service.defineService('ssmcontacts', ['2021-05-03']); +Object.defineProperty(apiLoader.services['ssmcontacts'], '2021-05-03', { + get: function get() { + var model = __nccwpck_require__(74831); + model.paginators = (__nccwpck_require__(63938)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SSMContacts; + + +/***/ }), + +/***/ 20590: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ssmincidents'] = {}; +AWS.SSMIncidents = Service.defineService('ssmincidents', ['2018-05-10']); +Object.defineProperty(apiLoader.services['ssmincidents'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(18719); + model.paginators = (__nccwpck_require__(4502)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(97755)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SSMIncidents; + + +/***/ }), + +/***/ 44552: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ssmsap'] = {}; +AWS.SsmSap = Service.defineService('ssmsap', ['2018-05-10']); 
+Object.defineProperty(apiLoader.services['ssmsap'], '2018-05-10', { + get: function get() { + var model = __nccwpck_require__(49218); + model.paginators = (__nccwpck_require__(94718)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SsmSap; + + +/***/ }), + +/***/ 71096: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['sso'] = {}; +AWS.SSO = Service.defineService('sso', ['2019-06-10']); +Object.defineProperty(apiLoader.services['sso'], '2019-06-10', { + get: function get() { + var model = __nccwpck_require__(8027); + model.paginators = (__nccwpck_require__(36610)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SSO; + + +/***/ }), + +/***/ 66644: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ssoadmin'] = {}; +AWS.SSOAdmin = Service.defineService('ssoadmin', ['2020-07-20']); +Object.defineProperty(apiLoader.services['ssoadmin'], '2020-07-20', { + get: function get() { + var model = __nccwpck_require__(7239); + model.paginators = (__nccwpck_require__(49402)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SSOAdmin; + + +/***/ }), + +/***/ 49870: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['ssooidc'] = {}; +AWS.SSOOIDC = Service.defineService('ssooidc', ['2019-06-10']); +Object.defineProperty(apiLoader.services['ssooidc'], '2019-06-10', { + get: function get() { + var model = __nccwpck_require__(62343); + model.paginators = (__nccwpck_require__(50215)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SSOOIDC; + + +/***/ }), + +/***/ 8136: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['stepfunctions'] = {}; +AWS.StepFunctions = Service.defineService('stepfunctions', ['2016-11-23']); +Object.defineProperty(apiLoader.services['stepfunctions'], '2016-11-23', { + get: function get() { + var model = __nccwpck_require__(85693); + model.paginators = (__nccwpck_require__(24818)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.StepFunctions; + + +/***/ }), + +/***/ 89190: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['storagegateway'] = {}; +AWS.StorageGateway = Service.defineService('storagegateway', ['2013-06-30']); +Object.defineProperty(apiLoader.services['storagegateway'], '2013-06-30', { + get: function get() { + var model = __nccwpck_require__(11069); + model.paginators = (__nccwpck_require__(33999)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = 
AWS.StorageGateway; + + +/***/ }), + +/***/ 57513: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['sts'] = {}; +AWS.STS = Service.defineService('sts', ['2011-06-15']); +__nccwpck_require__(91055); +Object.defineProperty(apiLoader.services['sts'], '2011-06-15', { + get: function get() { + var model = __nccwpck_require__(80753); + model.paginators = (__nccwpck_require__(93639)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.STS; + + +/***/ }), + +/***/ 39674: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['supplychain'] = {}; +AWS.SupplyChain = Service.defineService('supplychain', ['2024-01-01']); +Object.defineProperty(apiLoader.services['supplychain'], '2024-01-01', { + get: function get() { + var model = __nccwpck_require__(78323); + model.paginators = (__nccwpck_require__(25916)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SupplyChain; + + +/***/ }), + +/***/ 1099: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['support'] = {}; +AWS.Support = Service.defineService('support', ['2013-04-15']); +Object.defineProperty(apiLoader.services['support'], '2013-04-15', { + get: function get() { + var model = __nccwpck_require__(20767); + model.paginators = (__nccwpck_require__(62491)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Support; + + +/***/ }), + +/***/ 51288: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['supportapp'] = {}; +AWS.SupportApp = Service.defineService('supportapp', ['2021-08-20']); +Object.defineProperty(apiLoader.services['supportapp'], '2021-08-20', { + get: function get() { + var model = __nccwpck_require__(94851); + model.paginators = (__nccwpck_require__(60546)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SupportApp; + + +/***/ }), + +/***/ 32327: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['swf'] = {}; +AWS.SWF = Service.defineService('swf', ['2012-01-25']); +__nccwpck_require__(31987); +Object.defineProperty(apiLoader.services['swf'], '2012-01-25', { + get: function get() { + var model = __nccwpck_require__(11144); + model.paginators = (__nccwpck_require__(48039)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.SWF; + + +/***/ }), + +/***/ 25910: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['synthetics'] = {}; 
+AWS.Synthetics = Service.defineService('synthetics', ['2017-10-11']); +Object.defineProperty(apiLoader.services['synthetics'], '2017-10-11', { + get: function get() { + var model = __nccwpck_require__(78752); + model.paginators = (__nccwpck_require__(61615)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Synthetics; + + +/***/ }), + +/***/ 58523: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['textract'] = {}; +AWS.Textract = Service.defineService('textract', ['2018-06-27']); +Object.defineProperty(apiLoader.services['textract'], '2018-06-27', { + get: function get() { + var model = __nccwpck_require__(49753); + model.paginators = (__nccwpck_require__(16270)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Textract; + + +/***/ }), + +/***/ 24529: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['timestreamquery'] = {}; +AWS.TimestreamQuery = Service.defineService('timestreamquery', ['2018-11-01']); +Object.defineProperty(apiLoader.services['timestreamquery'], '2018-11-01', { + get: function get() { + var model = __nccwpck_require__(70457); + model.paginators = (__nccwpck_require__(97217)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.TimestreamQuery; + + +/***/ }), + +/***/ 1573: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['timestreamwrite'] = {}; +AWS.TimestreamWrite = Service.defineService('timestreamwrite', ['2018-11-01']); +Object.defineProperty(apiLoader.services['timestreamwrite'], '2018-11-01', { + get: function get() { + var model = __nccwpck_require__(8368); + model.paginators = (__nccwpck_require__(89653)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.TimestreamWrite; + + +/***/ }), + +/***/ 15300: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['tnb'] = {}; +AWS.Tnb = Service.defineService('tnb', ['2008-10-21']); +Object.defineProperty(apiLoader.services['tnb'], '2008-10-21', { + get: function get() { + var model = __nccwpck_require__(1433); + model.paginators = (__nccwpck_require__(55995)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Tnb; + + +/***/ }), + +/***/ 75811: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['transcribeservice'] = {}; +AWS.TranscribeService = Service.defineService('transcribeservice', ['2017-10-26']); +Object.defineProperty(apiLoader.services['transcribeservice'], '2017-10-26', { + get: function get() { + var model = __nccwpck_require__(47294); + model.paginators = 
(__nccwpck_require__(25395)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.TranscribeService; + + +/***/ }), + +/***/ 51585: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['transfer'] = {}; +AWS.Transfer = Service.defineService('transfer', ['2018-11-05']); +Object.defineProperty(apiLoader.services['transfer'], '2018-11-05', { + get: function get() { + var model = __nccwpck_require__(93419); + model.paginators = (__nccwpck_require__(65803)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(45405)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Transfer; + + +/***/ }), + +/***/ 72544: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['translate'] = {}; +AWS.Translate = Service.defineService('translate', ['2017-07-01']); +Object.defineProperty(apiLoader.services['translate'], '2017-07-01', { + get: function get() { + var model = __nccwpck_require__(61084); + model.paginators = (__nccwpck_require__(40304)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Translate; + + +/***/ }), + +/***/ 4992: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['trustedadvisor'] = {}; +AWS.TrustedAdvisor = Service.defineService('trustedadvisor', ['2022-09-15']); +Object.defineProperty(apiLoader.services['trustedadvisor'], '2022-09-15', { + get: function get() { + var model = __nccwpck_require__(3889); + model.paginators = (__nccwpck_require__(89124)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.TrustedAdvisor; + + +/***/ }), + +/***/ 35604: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['verifiedpermissions'] = {}; +AWS.VerifiedPermissions = Service.defineService('verifiedpermissions', ['2021-12-01']); +Object.defineProperty(apiLoader.services['verifiedpermissions'], '2021-12-01', { + get: function get() { + var model = __nccwpck_require__(31407); + model.paginators = (__nccwpck_require__(85997)/* .pagination */ .o); + model.waiters = (__nccwpck_require__(14021)/* .waiters */ .V); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.VerifiedPermissions; + + +/***/ }), + +/***/ 28747: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['voiceid'] = {}; +AWS.VoiceID = Service.defineService('voiceid', ['2021-09-27']); +Object.defineProperty(apiLoader.services['voiceid'], '2021-09-27', { + get: function get() { + var model = __nccwpck_require__(9375); + model.paginators = (__nccwpck_require__(59512)/* .pagination */ .o); + return model; + }, + enumerable: true, + 
configurable: true +}); + +module.exports = AWS.VoiceID; + + +/***/ }), + +/***/ 78952: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['vpclattice'] = {}; +AWS.VPCLattice = Service.defineService('vpclattice', ['2022-11-30']); +Object.defineProperty(apiLoader.services['vpclattice'], '2022-11-30', { + get: function get() { + var model = __nccwpck_require__(49656); + model.paginators = (__nccwpck_require__(98717)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.VPCLattice; + + +/***/ }), + +/***/ 72742: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['waf'] = {}; +AWS.WAF = Service.defineService('waf', ['2015-08-24']); +Object.defineProperty(apiLoader.services['waf'], '2015-08-24', { + get: function get() { + var model = __nccwpck_require__(37925); + model.paginators = (__nccwpck_require__(65794)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WAF; + + +/***/ }), + +/***/ 23153: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['wafregional'] = {}; +AWS.WAFRegional = Service.defineService('wafregional', ['2016-11-28']); +Object.defineProperty(apiLoader.services['wafregional'], '2016-11-28', { + get: function get() { + var model = __nccwpck_require__(20014); + model.paginators = (__nccwpck_require__(66829)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WAFRegional; + + +/***/ }), + +/***/ 50353: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['wafv2'] = {}; +AWS.WAFV2 = Service.defineService('wafv2', ['2019-07-29']); +Object.defineProperty(apiLoader.services['wafv2'], '2019-07-29', { + get: function get() { + var model = __nccwpck_require__(51872); + model.paginators = (__nccwpck_require__(33900)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WAFV2; + + +/***/ }), + +/***/ 86263: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['wellarchitected'] = {}; +AWS.WellArchitected = Service.defineService('wellarchitected', ['2020-03-31']); +Object.defineProperty(apiLoader.services['wellarchitected'], '2020-03-31', { + get: function get() { + var model = __nccwpck_require__(19249); + model.paginators = (__nccwpck_require__(54693)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WellArchitected; + + +/***/ }), + +/***/ 85266: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + 
+apiLoader.services['wisdom'] = {}; +AWS.Wisdom = Service.defineService('wisdom', ['2020-10-19']); +Object.defineProperty(apiLoader.services['wisdom'], '2020-10-19', { + get: function get() { + var model = __nccwpck_require__(94385); + model.paginators = (__nccwpck_require__(54852)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.Wisdom; + + +/***/ }), + +/***/ 38835: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['workdocs'] = {}; +AWS.WorkDocs = Service.defineService('workdocs', ['2016-05-01']); +Object.defineProperty(apiLoader.services['workdocs'], '2016-05-01', { + get: function get() { + var model = __nccwpck_require__(41052); + model.paginators = (__nccwpck_require__(94768)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WorkDocs; + + +/***/ }), + +/***/ 48579: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['worklink'] = {}; +AWS.WorkLink = Service.defineService('worklink', ['2018-09-25']); +Object.defineProperty(apiLoader.services['worklink'], '2018-09-25', { + get: function get() { + var model = __nccwpck_require__(37178); + model.paginators = (__nccwpck_require__(74073)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WorkLink; + + +/***/ }), + +/***/ 38374: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['workmail'] = {}; +AWS.WorkMail = Service.defineService('workmail', ['2017-10-01']); +Object.defineProperty(apiLoader.services['workmail'], '2017-10-01', { + get: function get() { + var model = __nccwpck_require__(93150); + model.paginators = (__nccwpck_require__(5158)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WorkMail; + + +/***/ }), + +/***/ 67025: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['workmailmessageflow'] = {}; +AWS.WorkMailMessageFlow = Service.defineService('workmailmessageflow', ['2019-05-01']); +Object.defineProperty(apiLoader.services['workmailmessageflow'], '2019-05-01', { + get: function get() { + var model = __nccwpck_require__(57733); + model.paginators = (__nccwpck_require__(85646)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WorkMailMessageFlow; + + +/***/ }), + +/***/ 25513: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['workspaces'] = {}; +AWS.WorkSpaces = Service.defineService('workspaces', ['2015-04-08']); +Object.defineProperty(apiLoader.services['workspaces'], '2015-04-08', { + get: function get() { + var model = __nccwpck_require__(97805); + model.paginators = 
(__nccwpck_require__(27769)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WorkSpaces; + + +/***/ }), + +/***/ 22033: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['workspacesthinclient'] = {}; +AWS.WorkSpacesThinClient = Service.defineService('workspacesthinclient', ['2023-08-22']); +Object.defineProperty(apiLoader.services['workspacesthinclient'], '2023-08-22', { + get: function get() { + var model = __nccwpck_require__(24229); + model.paginators = (__nccwpck_require__(52084)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WorkSpacesThinClient; + + +/***/ }), + +/***/ 94124: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['workspacesweb'] = {}; +AWS.WorkSpacesWeb = Service.defineService('workspacesweb', ['2020-07-08']); +Object.defineProperty(apiLoader.services['workspacesweb'], '2020-07-08', { + get: function get() { + var model = __nccwpck_require__(47128); + model.paginators = (__nccwpck_require__(43497)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.WorkSpacesWeb; + + +/***/ }), + +/***/ 41548: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); +var AWS = __nccwpck_require__(28437); +var Service = AWS.Service; +var apiLoader = AWS.apiLoader; + +apiLoader.services['xray'] = {}; +AWS.XRay = Service.defineService('xray', ['2016-04-12']); +Object.defineProperty(apiLoader.services['xray'], '2016-04-12', { + get: function get() { + var model = __nccwpck_require__(97355); + model.paginators = (__nccwpck_require__(97949)/* .pagination */ .o); + return model; + }, + enumerable: true, + configurable: true +}); + +module.exports = AWS.XRay; + + +/***/ }), + +/***/ 52793: +/***/ ((module) => { + +function apiLoader(svc, version) { + if (!apiLoader.services.hasOwnProperty(svc)) { + throw new Error('InvalidService: Failed to load api for ' + svc); + } + return apiLoader.services[svc][version]; +} + +/** + * @api private + * + * This member of AWS.apiLoader is private, but changing it will necessitate a + * change to ../scripts/services-table-generator.ts + */ +apiLoader.services = {}; + +/** + * @api private + */ +module.exports = apiLoader; + + +/***/ }), + +/***/ 71786: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(73639); + +var AWS = __nccwpck_require__(28437); + +// Load all service classes +__nccwpck_require__(26296); + +/** + * @api private + */ +module.exports = AWS; + + +/***/ }), + +/***/ 93260: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437), + url = AWS.util.url, + crypto = AWS.util.crypto.lib, + base64Encode = AWS.util.base64.encode, + inherit = AWS.util.inherit; + +var queryEncode = function (string) { + var replacements = { + '+': '-', + '=': '_', + '/': '~' + }; + return string.replace(/[\+=\/]/g, function (match) { + return replacements[match]; + }); +}; + +var signPolicy = function (policy, privateKey) { + var sign = crypto.createSign('RSA-SHA1'); + sign.write(policy); + return 
queryEncode(sign.sign(privateKey, 'base64')); +}; + +var signWithCannedPolicy = function (url, expires, keyPairId, privateKey) { + var policy = JSON.stringify({ + Statement: [ + { + Resource: url, + Condition: { DateLessThan: { 'AWS:EpochTime': expires } } + } + ] + }); + + return { + Expires: expires, + 'Key-Pair-Id': keyPairId, + Signature: signPolicy(policy.toString(), privateKey) + }; +}; + +var signWithCustomPolicy = function (policy, keyPairId, privateKey) { + policy = policy.replace(/\s/mg, ''); + + return { + Policy: queryEncode(base64Encode(policy)), + 'Key-Pair-Id': keyPairId, + Signature: signPolicy(policy, privateKey) + }; +}; + +var determineScheme = function (url) { + var parts = url.split('://'); + if (parts.length < 2) { + throw new Error('Invalid URL.'); + } + + return parts[0].replace('*', ''); +}; + +var getRtmpUrl = function (rtmpUrl) { + var parsed = url.parse(rtmpUrl); + return parsed.path.replace(/^\//, '') + (parsed.hash || ''); +}; + +var getResource = function (url) { + switch (determineScheme(url)) { + case 'http': + case 'https': + return url; + case 'rtmp': + return getRtmpUrl(url); + default: + throw new Error('Invalid URI scheme. Scheme must be one of' + + ' http, https, or rtmp'); + } +}; + +var handleError = function (err, callback) { + if (!callback || typeof callback !== 'function') { + throw err; + } + + callback(err); +}; + +var handleSuccess = function (result, callback) { + if (!callback || typeof callback !== 'function') { + return result; + } + + callback(null, result); +}; + +AWS.CloudFront.Signer = inherit({ + /** + * A signer object can be used to generate signed URLs and cookies for granting + * access to content on restricted CloudFront distributions. + * + * @see http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/PrivateContent.html + * + * @param keyPairId [String] (Required) The ID of the CloudFront key pair + * being used. + * @param privateKey [String] (Required) A private key in RSA format. + */ + constructor: function Signer(keyPairId, privateKey) { + if (keyPairId === void 0 || privateKey === void 0) { + throw new Error('A key pair ID and private key are required'); + } + + this.keyPairId = keyPairId; + this.privateKey = privateKey; + }, + + /** + * Create a signed Amazon CloudFront Cookie. + * + * @param options [Object] The options to create a signed cookie. + * @option options url [String] The URL to which the signature will grant + * access. Required unless you pass in a full + * policy. + * @option options expires [Number] A Unix UTC timestamp indicating when the + * signature should expire. Required unless you + * pass in a full policy. + * @option options policy [String] A CloudFront JSON policy. Required unless + * you pass in a url and an expiry time. + * + * @param cb [Function] if a callback is provided, this function will + * pass the hash as the second parameter (after the error parameter) to + * the callback function. + * + * @return [Object] if called synchronously (with no callback), returns the + * signed cookie parameters. + * @return [null] nothing is returned if a callback is provided. + */ + getSignedCookie: function (options, cb) { + var signatureHash = 'policy' in options + ? 
signWithCustomPolicy(options.policy, this.keyPairId, this.privateKey)
+      : signWithCannedPolicy(options.url, options.expires, this.keyPairId, this.privateKey);
+
+    var cookieHash = {};
+    for (var key in signatureHash) {
+      if (Object.prototype.hasOwnProperty.call(signatureHash, key)) {
+        cookieHash['CloudFront-' + key] = signatureHash[key];
+      }
+    }
+
+    return handleSuccess(cookieHash, cb);
+  },
+
+  /**
+   * Create a signed Amazon CloudFront URL.
+   *
+   * Keep in mind that URLs meant for use in media/flash players may have
+   * different requirements for URL formats (e.g. some require that the
+   * extension be removed, some require the file name to be prefixed with
+   * `mp4:`, and some require you to add "/cfx/st" into your URL).
+   *
+   * @param options [Object] The options to create a signed URL.
+   * @option options url [String] The URL to which the signature will grant
+   *                              access. Any query params included with
+   *                              the URL should be encoded. Required.
+   * @option options expires [Number] A Unix UTC timestamp indicating when the
+   *                                  signature should expire. Required unless you
+   *                                  pass in a full policy.
+   * @option options policy [String] A CloudFront JSON policy. Required unless
+   *                                 you pass in a url and an expiry time.
+   *
+   * @param cb [Function] if a callback is provided, this function will
+   *   pass the URL as the second parameter (after the error parameter) to
+   *   the callback function.
+   *
+   * @return [String] if called synchronously (with no callback), returns the
+   *   signed URL.
+   * @return [null] nothing is returned if a callback is provided.
+   */
+  getSignedUrl: function (options, cb) {
+    try {
+      var resource = getResource(options.url);
+    } catch (err) {
+      return handleError(err, cb);
+    }
+
+    var parsedUrl = url.parse(options.url, true),
+        signatureHash = Object.prototype.hasOwnProperty.call(options, 'policy')
+          ? signWithCustomPolicy(options.policy, this.keyPairId, this.privateKey)
+          : signWithCannedPolicy(resource, options.expires, this.keyPairId, this.privateKey);
+
+    parsedUrl.search = null;
+    for (var key in signatureHash) {
+      if (Object.prototype.hasOwnProperty.call(signatureHash, key)) {
+        parsedUrl.query[key] = signatureHash[key];
+      }
+    }
+
+    try {
+      var signedUrl = determineScheme(options.url) === 'rtmp'
+        ? getRtmpUrl(url.format(parsedUrl))
+        : url.format(parsedUrl);
+    } catch (err) {
+      return handleError(err, cb);
+    }
+
+    return handleSuccess(signedUrl, cb);
+  }
+});
+
+/**
+ * @api private
+ */
+module.exports = AWS.CloudFront.Signer;
+
+
+/***/ }),
+
+/***/ 38110:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+__nccwpck_require__(53819);
+__nccwpck_require__(36965);
+var PromisesDependency;
+
+/**
+ * The main configuration class used by all service objects to set
+ * the region, credentials, and other options for requests.
+ *
+ * By default, credentials and region settings are left unconfigured.
+ * This should be configured by the application before using any
+ * AWS service APIs.
+ *
+ * In order to set global configuration options, properties should
+ * be assigned to the global {AWS.config} object.
+ *
+ * @see AWS.config
+ *
+ * @!group General Configuration Options
+ *
+ * @!attribute credentials
+ *   @return [AWS.Credentials] the AWS credentials to sign requests with.
+ *
+ * @!attribute region
+ *   @example Set the global region setting to us-west-2
+ *     AWS.config.update({region: 'us-west-2'});
+ *   @return [String] The region to send service requests to.
+ *   @see http://docs.amazonwebservices.com/general/latest/gr/rande.html
+ *     A list of available endpoints for each AWS service
+ *
+ * @!attribute maxRetries
+ *   @return [Integer] the maximum amount of retries to perform for a
+ *     service request. By default this value is calculated by the specific
+ *     service object that the request is being made to.
+ *
+ * @!attribute maxRedirects
+ *   @return [Integer] the maximum amount of redirects to follow for a
+ *     service request. Defaults to 10.
+ *
+ * @!attribute paramValidation
+ *   @return [Boolean|map] whether input parameters should be validated against
+ *     the operation description before sending the request. Defaults to true.
+ *     Pass a map to enable any of the following specific validation features:
+ *
+ *     * **min** [Boolean] — Validates that a value meets the min
+ *       constraint. This is enabled by default when paramValidation is set
+ *       to `true`.
+ *     * **max** [Boolean] — Validates that a value meets the max
+ *       constraint.
+ *     * **pattern** [Boolean] — Validates that a string value matches a
+ *       regular expression.
+ *     * **enum** [Boolean] — Validates that a string value matches one
+ *       of the allowable enum values.
+ *
+ * @!attribute computeChecksums
+ *   @return [Boolean] whether to compute checksums for payload bodies when
+ *     the service accepts it (currently supported in S3 and SQS only).
+ *
+ * @!attribute convertResponseTypes
+ *   @return [Boolean] whether types are converted when parsing response data.
+ *     Currently only supported for JSON based services. Turning this off may
+ *     improve performance on large response payloads. Defaults to `true`.
+ *
+ * @!attribute correctClockSkew
+ *   @return [Boolean] whether to apply a clock skew correction and retry
+ *     requests that fail because of a skewed client clock. Defaults to
+ *     `false`.
+ *
+ * @!attribute sslEnabled
+ *   @return [Boolean] whether SSL is enabled for requests
+ *
+ * @!attribute s3ForcePathStyle
+ *   @return [Boolean] whether to force path style URLs for S3 objects
+ *
+ * @!attribute s3BucketEndpoint
+ *   @note Setting this configuration option requires an `endpoint` to be
+ *     provided explicitly to the service constructor.
+ *   @return [Boolean] whether the provided endpoint addresses an individual
+ *     bucket (false if it addresses the root API endpoint).
+ *
+ * @!attribute s3DisableBodySigning
+ *   @return [Boolean] whether to disable S3 body signing when using signature version `v4`.
+ *     Body signing can only be disabled when using https. Defaults to `true`.
+ *
+ * @!attribute s3UsEast1RegionalEndpoint
+ *   @return ['legacy'|'regional'] when region is set to 'us-east-1', whether to send s3
+ *     requests to global endpoints or 'us-east-1' regional endpoints. This config is only
+ *     applicable to the S3 client. Defaults to 'legacy'.
+ *
+ * @!attribute s3UseArnRegion
+ *   @return [Boolean] whether to override the request region with the region inferred
+ *     from the requested resource's ARN. Only available for S3 buckets.
+ *     Defaults to `true`.
+ *
+ * @!attribute useAccelerateEndpoint
+ *   @note This configuration option is only compatible with S3 while accessing
+ *     dns-compatible buckets.
+ *   @return [Boolean] Whether to use the Accelerate endpoint with the S3 service.
+ *     Defaults to `false`.
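+ *   @example Opting in to the S3 Transfer Acceleration endpoint (a minimal
+ *     sketch; it only illustrates toggling this flag)
+ *     AWS.config.update({useAccelerateEndpoint: true});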
+ *
+ * @!attribute retryDelayOptions
+ *   @example Set the base retry delay for all services to 300 ms
+ *     AWS.config.update({retryDelayOptions: {base: 300}});
+ *     // Delays with maxRetries = 3: 300, 600, 1200
+ *   @example Set a custom backoff function to provide delay values on retries
+ *     AWS.config.update({retryDelayOptions: {customBackoff: function(retryCount, err) {
+ *       // returns delay in ms
+ *     }}});
+ *   @return [map] A set of options to configure the retry delay on retryable errors.
+ *     Currently supported options are:
+ *
+ *     * **base** [Integer] — The base number of milliseconds to use in the
+ *       exponential backoff for operation retries. Defaults to 100 ms for all services except
+ *       DynamoDB, where it defaults to 50ms.
+ *
+ *     * **customBackoff** [function] — A custom function that accepts a
+ *       retry count and error and returns the amount of time to delay in
+ *       milliseconds. If the result is a non-zero negative value, no further
+ *       retry attempts will be made. The `base` option will be ignored if this
+ *       option is supplied. The function is only called for retryable errors.
+ *
+ * @!attribute httpOptions
+ *   @return [map] A set of options to pass to the low-level HTTP request.
+ *     Currently supported options are:
+ *
+ *     * **proxy** [String] — the URL to proxy requests through
+ *     * **agent** [http.Agent, https.Agent] — the Agent object to perform
+ *       HTTP requests with. Used for connection pooling. Note that for
+ *       SSL connections, a special Agent object is used in order to enable
+ *       peer certificate verification. This feature is only supported in the
+ *       Node.js environment.
+ *     * **connectTimeout** [Integer] — Sets the socket to timeout after
+ *       failing to establish a connection with the server after
+ *       `connectTimeout` milliseconds. This timeout has no effect once a socket
+ *       connection has been established.
+ *     * **timeout** [Integer] — The number of milliseconds a request can
+ *       take before automatically being terminated.
+ *       Defaults to two minutes (120000).
+ *     * **xhrAsync** [Boolean] — Whether the SDK will send asynchronous
+ *       HTTP requests. Used in the browser environment only. Set to false to
+ *       send requests synchronously. Defaults to true (async on).
+ *     * **xhrWithCredentials** [Boolean] — Sets the "withCredentials"
+ *       property of an XMLHttpRequest object. Used in the browser environment
+ *       only. Defaults to false.
+ *
+ * @!attribute logger
+ *   @return [#write,#log] an object that responds to .write() (like a stream)
+ *     or .log() (like the console object) in order to log information about
+ *     requests
+ *
+ * @!attribute systemClockOffset
+ *   @return [Number] an offset value in milliseconds to apply to all signing
+ *     times. Use this to compensate for clock skew when your system may be
+ *     out of sync with the service time. Note that this configuration option
+ *     can only be applied to the global `AWS.config` object and cannot be
+ *     overridden in service-specific configuration. Defaults to 0 milliseconds.
+ *
+ * @!attribute signatureVersion
+ *   @return [String] the signature version to sign requests with (overriding
+ *     the API configuration). Possible values are: 'v2', 'v3', 'v4'.
+ *
+ * @!attribute signatureCache
+ *   @return [Boolean] whether the signature to sign requests with (overriding
+ *     the API configuration) is cached. Only applies to the signature version 'v4'.
+ *     Defaults to `true`.
+ *
+ * @!attribute endpointDiscoveryEnabled
+ *   @return [Boolean|undefined] whether to call operations with endpoints
+ *     given by service dynamically.
+ *     Setting this config to `true` will enable
+ *     endpoint discovery for all applicable operations. Setting it to `false`
+ *     will explicitly disable endpoint discovery even though operations that
+ *     require endpoint discovery will presumably fail. Leaving it as
+ *     `undefined` means the SDK will only do endpoint discovery when it's
+ *     required. Defaults to `undefined`.
+ *
+ * @!attribute endpointCacheSize
+ *   @return [Number] the size of the global cache storing endpoints from endpoint
+ *     discovery operations. Once the endpoint cache is created, updating this
+ *     setting cannot change the existing cache size. Defaults to 1000.
+ *
+ * @!attribute hostPrefixEnabled
+ *   @return [Boolean] whether to marshal request parameters to the prefix of
+ *     hostname. Defaults to `true`.
+ *
+ * @!attribute stsRegionalEndpoints
+ *   @return ['legacy'|'regional'] whether to send sts requests to global endpoints or
+ *     regional endpoints. Defaults to 'legacy'.
+ *
+ * @!attribute useFipsEndpoint
+ *   @return [Boolean] Enables FIPS compatible endpoints. Defaults to `false`.
+ *
+ * @!attribute useDualstackEndpoint
+ *   @return [Boolean] Enables IPv6 dualstack endpoint. Defaults to `false`.
+ */
+AWS.Config = AWS.util.inherit({
+  /**
+   * @!endgroup
+   */
+
+  /**
+   * Creates a new configuration object. This is the object that passes
+   * option data along to service requests, including credentials, security,
+   * region information, and some service specific settings.
+   *
+   * @example Creating a new configuration object with credentials and region
+   *   var config = new AWS.Config({
+   *     accessKeyId: 'AKID', secretAccessKey: 'SECRET', region: 'us-west-2'
+   *   });
+   * @option options accessKeyId [String] your AWS access key ID.
+   * @option options secretAccessKey [String] your AWS secret access key.
+   * @option options sessionToken [AWS.Credentials] the optional AWS
+   *   session token to sign requests with.
+   * @option options credentials [AWS.Credentials] the AWS credentials
+   *   to sign requests with. You can either specify this object, or
+   *   specify the accessKeyId and secretAccessKey options directly.
+   * @option options credentialProvider [AWS.CredentialProviderChain] the
+   *   provider chain used to resolve credentials if no static `credentials`
+   *   property is set.
+   * @option options region [String] the region to send service requests to.
+   *   See {region} for more information.
+   * @option options maxRetries [Integer] the maximum amount of retries to
+   *   attempt with a request. See {maxRetries} for more information.
+   * @option options maxRedirects [Integer] the maximum amount of redirects to
+   *   follow with a request. See {maxRedirects} for more information.
+   * @option options sslEnabled [Boolean] whether to enable SSL for
+   *   requests.
+   * @option options paramValidation [Boolean|map] whether input parameters
+   *   should be validated against the operation description before sending
+   *   the request. Defaults to true. Pass a map to enable any of the
+   *   following specific validation features:
+   *
+   *   * **min** [Boolean] — Validates that a value meets the min
+   *     constraint. This is enabled by default when paramValidation is set
+   *     to `true`.
+   *   * **max** [Boolean] — Validates that a value meets the max
+   *     constraint.
+   *   * **pattern** [Boolean] — Validates that a string value matches a
+   *     regular expression.
+   *   * **enum** [Boolean] — Validates that a string value matches one
+   *     of the allowable enum values.
+   * @option options computeChecksums [Boolean] whether to compute checksums
+   *   for payload bodies when the service accepts it (currently supported
+   *   in S3 only)
+   * @option options convertResponseTypes [Boolean] whether types are converted
+   *   when parsing response data. Currently only supported for JSON based
+   *   services. Turning this off may improve performance on large response
+   *   payloads. Defaults to `true`.
+   * @option options correctClockSkew [Boolean] whether to apply a clock skew
+   *   correction and retry requests that fail because of a skewed client
+   *   clock. Defaults to `false`.
+   * @option options s3ForcePathStyle [Boolean] whether to force path
+   *   style URLs for S3 objects.
+   * @option options s3BucketEndpoint [Boolean] whether the provided endpoint
+   *   addresses an individual bucket (false if it addresses the root API
+   *   endpoint). Note that setting this configuration option requires an
+   *   `endpoint` to be provided explicitly to the service constructor.
+   * @option options s3DisableBodySigning [Boolean] whether S3 body signing
+   *   should be disabled when using signature version `v4`. Body signing
+   *   can only be disabled when using https. Defaults to `true`.
+   * @option options s3UsEast1RegionalEndpoint ['legacy'|'regional'] when region
+   *   is set to 'us-east-1', whether to send s3 requests to global endpoints or
+   *   'us-east-1' regional endpoints. This config is only applicable to the S3
+   *   client. Defaults to `legacy`.
+   * @option options s3UseArnRegion [Boolean] whether to override the request region
+   *   with the region inferred from the requested resource's ARN. Only available
+   *   for S3 buckets. Defaults to `true`.
+   *
+   * @option options retryDelayOptions [map] A set of options to configure
+   *   the retry delay on retryable errors. Currently supported options are:
+   *
+   *   * **base** [Integer] — The base number of milliseconds to use in the
+   *     exponential backoff for operation retries. Defaults to 100 ms for all
+   *     services except DynamoDB, where it defaults to 50ms.
+   *   * **customBackoff** [function] — A custom function that accepts a
+   *     retry count and error and returns the amount of time to delay in
+   *     milliseconds. If the result is a non-zero negative value, no further
+   *     retry attempts will be made. The `base` option will be ignored if this
+   *     option is supplied. The function is only called for retryable errors.
+   * @option options httpOptions [map] A set of options to pass to the low-level
+   *   HTTP request. Currently supported options are:
+   *
+   *   * **proxy** [String] — the URL to proxy requests through
+   *   * **agent** [http.Agent, https.Agent] — the Agent object to perform
+   *     HTTP requests with. Used for connection pooling. Defaults to the global
+   *     agent (`http.globalAgent`) for non-SSL connections. Note that for
+   *     SSL connections, a special Agent object is used in order to enable
+   *     peer certificate verification. This feature is only available in the
+   *     Node.js environment.
+   *   * **connectTimeout** [Integer] — Sets the socket to timeout after
+   *     failing to establish a connection with the server after
+   *     `connectTimeout` milliseconds. This timeout has no effect once a socket
+   *     connection has been established.
+   *   * **timeout** [Integer] — Sets the socket to timeout after timeout
+   *     milliseconds of inactivity on the socket. Defaults to two minutes
+   *     (120000).
+   *   * **xhrAsync** [Boolean] — Whether the SDK will send asynchronous
+   *     HTTP requests. Used in the browser environment only. Set to false to
+   *     send requests synchronously. Defaults to true (async on).
+   *   * **xhrWithCredentials** [Boolean] — Sets the "withCredentials"
+   *     property of an XMLHttpRequest object. Used in the browser environment
+   *     only. Defaults to false.
+   * @option options apiVersion [String, Date] a String in YYYY-MM-DD format
+   *   (or a date) that represents the latest possible API version that can be
+   *   used in all services (unless overridden by `apiVersions`). Specify
+   *   'latest' to use the latest possible version.
+   * @option options apiVersions [map] a map of service
+   *   identifiers (the lowercase service class name) with the API version to
+   *   use when instantiating a service. Specify 'latest' for each individual
+   *   service that can use the latest available version.
+   * @option options logger [#write,#log] an object that responds to .write()
+   *   (like a stream) or .log() (like the console object) in order to log
+   *   information about requests
+   * @option options systemClockOffset [Number] an offset value in milliseconds
+   *   to apply to all signing times. Use this to compensate for clock skew
+   *   when your system may be out of sync with the service time. Note that
+   *   this configuration option can only be applied to the global `AWS.config`
+   *   object and cannot be overridden in service-specific configuration.
+   *   Defaults to 0 milliseconds.
+   * @option options signatureVersion [String] the signature version to sign
+   *   requests with (overriding the API configuration). Possible values are:
+   *   'v2', 'v3', 'v4'.
+   * @option options signatureCache [Boolean] whether the signature to sign
+   *   requests with (overriding the API configuration) is cached. Only applies
+   *   to the signature version 'v4'. Defaults to `true`.
+   * @option options dynamoDbCrc32 [Boolean] whether to validate the CRC32
+   *   checksum of HTTP response bodies returned by DynamoDB. Default: `true`.
+   * @option options useAccelerateEndpoint [Boolean] Whether to use the
+   *   S3 Transfer Acceleration endpoint with the S3 service. Default: `false`.
+   * @option options clientSideMonitoring [Boolean] whether to collect and
+   *   publish this client's performance metrics of all its API requests.
+   * @option options endpointDiscoveryEnabled [Boolean|undefined] whether to
+   *   call operations with endpoints given by service dynamically. Setting this
+   *   config to `true` will enable endpoint discovery for all applicable operations.
+   *   Setting it to `false` will explicitly disable endpoint discovery even though
+   *   operations that require endpoint discovery will presumably fail. Leaving it
+   *   as `undefined` means the SDK will only do endpoint discovery when it's
+   *   required. Defaults to `undefined`.
+   * @option options endpointCacheSize [Number] the size of the global cache storing
+   *   endpoints from endpoint discovery operations. Once the endpoint cache is
+   *   created, updating this setting cannot change the existing cache size.
+   *   Defaults to 1000.
+   * @option options hostPrefixEnabled [Boolean] whether to marshal request
+   *   parameters to the prefix of hostname. Defaults to `true`.
+   * @option options stsRegionalEndpoints ['legacy'|'regional'] whether to send sts
+   *   requests to global endpoints or regional endpoints. Defaults to 'legacy'.
+   * @option options useFipsEndpoint [Boolean] Enables FIPS compatible endpoints.
+   *   Defaults to `false`.
+   * @option options useDualstackEndpoint [Boolean] Enables IPv6 dualstack endpoint.
+   *   Defaults to `false`.
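+   *
+   * @example Combining several of these options (a minimal sketch; the key,
+   *   region, and timeout values below are placeholders, not recommendations)
+   *   var config = new AWS.Config({
+   *     accessKeyId: 'AKID', secretAccessKey: 'SECRET',
+   *     region: 'us-west-2',
+   *     maxRetries: 5,
+   *     retryDelayOptions: {base: 200},
+   *     httpOptions: {timeout: 30000}
+   *   });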
+   */
+  constructor: function Config(options) {
+    if (options === undefined) options = {};
+    options = this.extractCredentials(options);
+
+    AWS.util.each.call(this, this.keys, function (key, value) {
+      this.set(key, options[key], value);
+    });
+  },
+
+  /**
+   * @!group Managing Credentials
+   */
+
+  /**
+   * Loads credentials from the configuration object. This is used internally
+   * by the SDK to ensure that refreshable {Credentials} objects are properly
+   * refreshed and loaded when sending a request. If you want to ensure that
+   * your credentials are loaded prior to a request, you can use this method
+   * directly to provide accurate credential data stored in the object.
+   *
+   * @note If you configure the SDK with static or environment credentials,
+   *   the credential data should already be present in the {credentials}
+   *   attribute. This method is primarily necessary to load credentials from
+   *   asynchronous sources, or sources that can refresh credentials
+   *   periodically.
+   * @example Getting your access key
+   *   AWS.config.getCredentials(function(err) {
+   *     if (err) console.log(err.stack); // credentials not loaded
+   *     else console.log("Access Key:", AWS.config.credentials.accessKeyId);
+   *   })
+   * @callback callback function(err)
+   *   Called when the {credentials} have been properly set on the configuration
+   *   object.
+   *
+   *   @param err [Error] if this is set, credentials were not successfully
+   *     loaded and this error provides information why.
+   * @see credentials
+   * @see Credentials
+   */
+  getCredentials: function getCredentials(callback) {
+    var self = this;
+
+    function finish(err) {
+      callback(err, err ? null : self.credentials);
+    }
+
+    function credError(msg, err) {
+      return new AWS.util.error(err || new Error(), {
+        code: 'CredentialsError',
+        message: msg,
+        name: 'CredentialsError'
+      });
+    }
+
+    function getAsyncCredentials() {
+      self.credentials.get(function(err) {
+        if (err) {
+          var msg = 'Could not load credentials from ' +
+            self.credentials.constructor.name;
+          err = credError(msg, err);
+        }
+        finish(err);
+      });
+    }
+
+    function getStaticCredentials() {
+      var err = null;
+      if (!self.credentials.accessKeyId || !self.credentials.secretAccessKey) {
+        err = credError('Missing credentials');
+      }
+      finish(err);
+    }
+
+    if (self.credentials) {
+      if (typeof self.credentials.get === 'function') {
+        getAsyncCredentials();
+      } else { // static credentials
+        getStaticCredentials();
+      }
+    } else if (self.credentialProvider) {
+      self.credentialProvider.resolve(function(err, creds) {
+        if (err) {
+          err = credError('Could not load credentials from any providers', err);
+        }
+        self.credentials = creds;
+        finish(err);
+      });
+    } else {
+      finish(credError('No credentials to load'));
+    }
+  },
+
+  /**
+   * Loads the token from the configuration object. This is used internally
+   * by the SDK to ensure that refreshable {Token} objects are properly
+   * refreshed and loaded when sending a request. If you want to ensure that
+   * your token is loaded prior to a request, you can use this method
+   * directly to provide accurate token data stored in the object.
+   *
+   * @note If you configure the SDK with a static token, the token data should
+   *   already be present in the {token} attribute. This method is primarily
+   *   necessary to load a token from asynchronous sources, or sources that
+   *   can refresh the token periodically.
+ * @example Getting your access token
+ *   AWS.config.getToken(function(err) {
+ *     if (err) console.log(err.stack); // token not loaded
+ *     else console.log("Token:", AWS.config.token.token);
+ *   })
+ * @callback callback function(err)
+ *   Called when the {token} has been properly set on the configuration object.
+ *
+ *   @param err [Error] if this is set, the token was not successfully loaded
+ *     and this error provides information why.
+ * @see token
+ */
+ getToken: function getToken(callback) {
+   var self = this;
+
+   function finish(err) {
+     callback(err, err ? null : self.token);
+   }
+
+   function tokenError(msg, err) {
+     return new AWS.util.error(err || new Error(), {
+       code: 'TokenError',
+       message: msg,
+       name: 'TokenError'
+     });
+   }
+
+   function getAsyncToken() {
+     self.token.get(function(err) {
+       if (err) {
+         var msg = 'Could not load token from ' +
+           self.token.constructor.name;
+         err = tokenError(msg, err);
+       }
+       finish(err);
+     });
+   }
+
+   function getStaticToken() {
+     var err = null;
+     if (!self.token.token) {
+       err = tokenError('Missing token');
+     }
+     finish(err);
+   }
+
+   if (self.token) {
+     if (typeof self.token.get === 'function') {
+       getAsyncToken();
+     } else { // static token
+       getStaticToken();
+     }
+   } else if (self.tokenProvider) {
+     self.tokenProvider.resolve(function(err, token) {
+       if (err) {
+         err = tokenError('Could not load token from any providers', err);
+       }
+       self.token = token;
+       finish(err);
+     });
+   } else {
+     finish(tokenError('No token to load'));
+   }
+ },
+
+ /**
+ * @!group Loading and Setting Configuration Options
+ */
+
+ /**
+ * @overload update(options, allowUnknownKeys = false)
+ *   Updates the current configuration object with new options.
+ *
+ *   @example Update maxRetries property of a configuration object
+ *     config.update({maxRetries: 10});
+ *   @param [Object] options a map of option keys and values.
+ *   @param [Boolean] allowUnknownKeys whether unknown keys can be set on
+ *     the configuration object. Defaults to `false`.
+ *   @see constructor
+ */
+ update: function update(options, allowUnknownKeys) {
+   allowUnknownKeys = allowUnknownKeys || false;
+   options = this.extractCredentials(options);
+   AWS.util.each.call(this, options, function (key, value) {
+     if (allowUnknownKeys || Object.prototype.hasOwnProperty.call(this.keys, key) ||
+         AWS.Service.hasService(key)) {
+       this.set(key, value);
+     }
+   });
+ },
+
+ /**
+ * Loads configuration data from a JSON file into this config object.
+ * @note Loading configuration will reset all existing configuration
+ *   on the object.
+ * @!macro nobrowser
+ * @param path [String] the path relative to your process's current
+ *   working directory to load configuration from.
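+ * @example Loading configuration from a JSON file on disk
+ *   // A brief sketch; './config.json' is a hypothetical file shaped like
+ *   // the options hash accepted by the constructor.
+ *   AWS.config.loadFromPath('./config.json');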
+ * @return [AWS.Config] the same configuration object + */ + loadFromPath: function loadFromPath(path) { + this.clear(); + + var options = JSON.parse(AWS.util.readFileSync(path)); + var fileSystemCreds = new AWS.FileSystemCredentials(path); + var chain = new AWS.CredentialProviderChain(); + chain.providers.unshift(fileSystemCreds); + chain.resolve(function (err, creds) { + if (err) throw err; + else options.credentials = creds; + }); + + this.constructor(options); + + return this; + }, + + /** + * Clears configuration data on this object + * + * @api private + */ + clear: function clear() { + /*jshint forin:false */ + AWS.util.each.call(this, this.keys, function (key) { + delete this[key]; + }); + + // reset credential provider + this.set('credentials', undefined); + this.set('credentialProvider', undefined); + }, + + /** + * Sets a property on the configuration object, allowing for a + * default value + * @api private + */ + set: function set(property, value, defaultValue) { + if (value === undefined) { + if (defaultValue === undefined) { + defaultValue = this.keys[property]; + } + if (typeof defaultValue === 'function') { + this[property] = defaultValue.call(this); + } else { + this[property] = defaultValue; + } + } else if (property === 'httpOptions' && this[property]) { + // deep merge httpOptions + this[property] = AWS.util.merge(this[property], value); + } else { + this[property] = value; + } + }, + + /** + * All of the keys with their default values. + * + * @constant + * @api private + */ + keys: { + credentials: null, + credentialProvider: null, + region: null, + logger: null, + apiVersions: {}, + apiVersion: null, + endpoint: undefined, + httpOptions: { + timeout: 120000 + }, + maxRetries: undefined, + maxRedirects: 10, + paramValidation: true, + sslEnabled: true, + s3ForcePathStyle: false, + s3BucketEndpoint: false, + s3DisableBodySigning: true, + s3UsEast1RegionalEndpoint: 'legacy', + s3UseArnRegion: undefined, + computeChecksums: true, + convertResponseTypes: true, + correctClockSkew: false, + customUserAgent: null, + dynamoDbCrc32: true, + systemClockOffset: 0, + signatureVersion: null, + signatureCache: true, + retryDelayOptions: {}, + useAccelerateEndpoint: false, + clientSideMonitoring: false, + endpointDiscoveryEnabled: undefined, + endpointCacheSize: 1000, + hostPrefixEnabled: true, + stsRegionalEndpoints: 'legacy', + useFipsEndpoint: false, + useDualstackEndpoint: false, + token: null + }, + + /** + * Extracts accessKeyId, secretAccessKey and sessionToken + * from a configuration hash. + * + * @api private + */ + extractCredentials: function extractCredentials(options) { + if (options.accessKeyId && options.secretAccessKey) { + options = AWS.util.copy(options); + options.credentials = new AWS.Credentials(options); + } + return options; + }, + + /** + * Sets the promise dependency the SDK will use wherever Promises are returned. + * Passing `null` will force the SDK to use native Promises if they are available. + * If native Promises are not available, passing `null` will have no effect. 
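+ * @example Setting a custom promise dependency (a rough sketch)
+ *   // 'bluebird' stands in for any Promises/A+ compatible constructor
+ *   // you may prefer over the native implementation.
+ *   AWS.config.setPromisesDependency(require('bluebird'));
+ *   // Reverting to native promises, when available:
+ *   AWS.config.setPromisesDependency(null);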
+ * @param [Constructor] dep A reference to a Promise constructor
+ */
+ setPromisesDependency: function setPromisesDependency(dep) {
+   PromisesDependency = dep;
+   // if null was passed in, we should try to use native promises
+   if (dep === null && typeof Promise === 'function') {
+     PromisesDependency = Promise;
+   }
+   var constructors = [AWS.Request, AWS.Credentials, AWS.CredentialProviderChain];
+   if (AWS.S3) {
+     constructors.push(AWS.S3);
+     if (AWS.S3.ManagedUpload) {
+       constructors.push(AWS.S3.ManagedUpload);
+     }
+   }
+   AWS.util.addPromises(constructors, PromisesDependency);
+ },
+
+ /**
+ * Gets the promise dependency set by `AWS.config.setPromisesDependency`.
+ */
+ getPromisesDependency: function getPromisesDependency() {
+   return PromisesDependency;
+ }
+});
+
+/**
+ * @return [AWS.Config] The global configuration object singleton instance
+ * @readonly
+ * @see AWS.Config
+ */
+AWS.config = new AWS.Config();
+
+
+/***/ }),
+
+/***/ 85566:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+/**
+ * @api private
+ */
+function validateRegionalEndpointsFlagValue(configValue, errorOptions) {
+  if (typeof configValue !== 'string') return undefined;
+  else if (['legacy', 'regional'].indexOf(configValue.toLowerCase()) >= 0) {
+    return configValue.toLowerCase();
+  } else {
+    throw AWS.util.error(new Error(), errorOptions);
+  }
+}
+
+/**
+ * Resolve the configuration value for regional endpoint from different
+ * sources: client config, environmental variable, shared config file. The
+ * value can be a case-insensitive 'legacy' or 'regional'.
+ * @param originalConfig user-supplied config object to resolve
+ * @param options a map of config property names from individual configuration source
+ * - env: name of environmental variable that refers to the config
+ * - sharedConfig: name of shared configuration file property that refers to the config
+ * - clientConfig: name of client configuration property that refers to the config
+ *
+ * @api private
+ */
+function resolveRegionalEndpointsFlag(originalConfig, options) {
+  originalConfig = originalConfig || {};
+  //validate config value
+  var resolved;
+  if (originalConfig[options.clientConfig]) {
+    resolved = validateRegionalEndpointsFlagValue(originalConfig[options.clientConfig], {
+      code: 'InvalidConfiguration',
+      message: 'invalid "' + options.clientConfig + '" configuration. Expect "legacy" ' +
+        ' or "regional". Got "' + originalConfig[options.clientConfig] + '".'
+    });
+    if (resolved) return resolved;
+  }
+  if (!AWS.util.isNode()) return resolved;
+  //validate environmental variable
+  if (Object.prototype.hasOwnProperty.call(process.env, options.env)) {
+    var envFlag = process.env[options.env];
+    resolved = validateRegionalEndpointsFlagValue(envFlag, {
+      code: 'InvalidEnvironmentalVariable',
+      message: 'invalid ' + options.env + ' environmental variable. Expect "legacy" ' +
+        ' or "regional". Got "' + process.env[options.env] + '".'
+    });
+    if (resolved) return resolved;
+  }
+  //validate shared config file
+  var profile = {};
+  try {
+    var profiles = AWS.util.getProfilesFromSharedConfig(AWS.util.iniLoader);
+    profile = profiles[process.env.AWS_PROFILE || AWS.util.defaultProfile];
+  } catch (e) {}
+  if (profile && Object.prototype.hasOwnProperty.call(profile, options.sharedConfig)) {
+    var fileFlag = profile[options.sharedConfig];
+    resolved = validateRegionalEndpointsFlagValue(fileFlag, {
+      code: 'InvalidConfiguration',
+      message: 'invalid ' + options.sharedConfig + ' profile config. 
Expect "legacy" ' + + ' or "regional". Got "' + profile[options.sharedConfig] + '".' + }); + if (resolved) return resolved; + } + return resolved; +} + +module.exports = resolveRegionalEndpointsFlag; + + +/***/ }), + +/***/ 28437: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * The main AWS namespace + */ +var AWS = { util: __nccwpck_require__(77985) }; + +/** + * @api private + * @!macro [new] nobrowser + * @note This feature is not supported in the browser environment of the SDK. + */ +var _hidden = {}; _hidden.toString(); // hack to parse macro + +/** + * @api private + */ +module.exports = AWS; + +AWS.util.update(AWS, { + + /** + * @constant + */ + VERSION: '2.1535.0', + + /** + * @api private + */ + Signers: {}, + + /** + * @api private + */ + Protocol: { + Json: __nccwpck_require__(30083), + Query: __nccwpck_require__(90761), + Rest: __nccwpck_require__(98200), + RestJson: __nccwpck_require__(5883), + RestXml: __nccwpck_require__(15143) + }, + + /** + * @api private + */ + XML: { + Builder: __nccwpck_require__(23546), + Parser: null // conditionally set based on environment + }, + + /** + * @api private + */ + JSON: { + Builder: __nccwpck_require__(47495), + Parser: __nccwpck_require__(5474) + }, + + /** + * @api private + */ + Model: { + Api: __nccwpck_require__(17657), + Operation: __nccwpck_require__(28083), + Shape: __nccwpck_require__(71349), + Paginator: __nccwpck_require__(45938), + ResourceWaiter: __nccwpck_require__(41368) + }, + + /** + * @api private + */ + apiLoader: __nccwpck_require__(52793), + + /** + * @api private + */ + EndpointCache: (__nccwpck_require__(96323)/* .EndpointCache */ .$) +}); +__nccwpck_require__(55948); +__nccwpck_require__(68903); +__nccwpck_require__(38110); +__nccwpck_require__(1556); +__nccwpck_require__(54995); +__nccwpck_require__(78652); +__nccwpck_require__(58743); +__nccwpck_require__(39925); +__nccwpck_require__(9897); +__nccwpck_require__(99127); +__nccwpck_require__(93985); + +/** + * @readonly + * @return [AWS.SequentialExecutor] a collection of global event listeners that + * are attached to every sent request. + * @see AWS.Request AWS.Request for a list of events to listen for + * @example Logging the time taken to send a request + * AWS.events.on('send', function startSend(resp) { + * resp.startTime = new Date().getTime(); + * }).on('complete', function calculateTime(resp) { + * var time = (new Date().getTime() - resp.startTime) / 1000; + * console.log('Request took ' + time + ' seconds'); + * }); + * + * new AWS.S3().listBuckets(); // prints 'Request took 0.285 seconds' + */ +AWS.events = new AWS.SequentialExecutor(); + +//create endpoint cache lazily +AWS.util.memoizedProperty(AWS, 'endpointCache', function() { + return new AWS.EndpointCache(AWS.config.endpointCacheSize); +}, true); + + +/***/ }), + +/***/ 53819: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +/** + * Represents your AWS security credentials, specifically the + * {accessKeyId}, {secretAccessKey}, and optional {sessionToken}. + * Creating a `Credentials` object allows you to pass around your + * security information to configuration and service objects. + * + * Note that this class typically does not need to be constructed manually, + * as the {AWS.Config} and {AWS.Service} classes both accept simple + * options hashes with the three keys. These structures will be converted + * into Credentials objects automatically. 
+ * + * ## Expiring and Refreshing Credentials + * + * Occasionally credentials can expire in the middle of a long-running + * application. In this case, the SDK will automatically attempt to + * refresh the credentials from the storage location if the Credentials + * class implements the {refresh} method. + * + * If you are implementing a credential storage location, you + * will want to create a subclass of the `Credentials` class and + * override the {refresh} method. This method allows credentials to be + * retrieved from the backing store, be it a file system, database, or + * some network storage. The method should reset the credential attributes + * on the object. + * + * @!attribute expired + * @return [Boolean] whether the credentials have been expired and + * require a refresh. Used in conjunction with {expireTime}. + * @!attribute expireTime + * @return [Date] a time when credentials should be considered expired. Used + * in conjunction with {expired}. + * @!attribute accessKeyId + * @return [String] the AWS access key ID + * @!attribute secretAccessKey + * @return [String] the AWS secret access key + * @!attribute sessionToken + * @return [String] an optional AWS session token + */ +AWS.Credentials = AWS.util.inherit({ + /** + * A credentials object can be created using positional arguments or an options + * hash. + * + * @overload AWS.Credentials(accessKeyId, secretAccessKey, sessionToken=null) + * Creates a Credentials object with a given set of credential information + * as positional arguments. + * @param accessKeyId [String] the AWS access key ID + * @param secretAccessKey [String] the AWS secret access key + * @param sessionToken [String] the optional AWS session token + * @example Create a credentials object with AWS credentials + * var creds = new AWS.Credentials('akid', 'secret', 'session'); + * @overload AWS.Credentials(options) + * Creates a Credentials object with a given set of credential information + * as an options hash. + * @option options accessKeyId [String] the AWS access key ID + * @option options secretAccessKey [String] the AWS secret access key + * @option options sessionToken [String] the optional AWS session token + * @example Create a credentials object with AWS credentials + * var creds = new AWS.Credentials({ + * accessKeyId: 'akid', secretAccessKey: 'secret', sessionToken: 'session' + * }); + */ + constructor: function Credentials() { + // hide secretAccessKey from being displayed with util.inspect + AWS.util.hideProperties(this, ['secretAccessKey']); + + this.expired = false; + this.expireTime = null; + this.refreshCallbacks = []; + if (arguments.length === 1 && typeof arguments[0] === 'object') { + var creds = arguments[0].credentials || arguments[0]; + this.accessKeyId = creds.accessKeyId; + this.secretAccessKey = creds.secretAccessKey; + this.sessionToken = creds.sessionToken; + } else { + this.accessKeyId = arguments[0]; + this.secretAccessKey = arguments[1]; + this.sessionToken = arguments[2]; + } + }, + + /** + * @return [Integer] the number of seconds before {expireTime} during which + * the credentials will be considered expired. + */ + expiryWindow: 15, + + /** + * @return [Boolean] whether the credentials object should call {refresh} + * @note Subclasses should override this method to provide custom refresh + * logic. 
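+ * @example Overriding refresh checks in a subclass (a rough sketch)
+ *   // 'AlwaysExpired' is a hypothetical subclass, shown only to
+ *   // illustrate the override point mentioned in the note above.
+ *   var AlwaysExpired = AWS.util.inherit(AWS.Credentials, {
+ *     needsRefresh: function() { return true; }
+ *   });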
+ */ + needsRefresh: function needsRefresh() { + var currentTime = AWS.util.date.getDate().getTime(); + var adjustedTime = new Date(currentTime + this.expiryWindow * 1000); + + if (this.expireTime && adjustedTime > this.expireTime) { + return true; + } else { + return this.expired || !this.accessKeyId || !this.secretAccessKey; + } + }, + + /** + * Gets the existing credentials, refreshing them if they are not yet loaded + * or have expired. Users should call this method before using {refresh}, + * as this will not attempt to reload credentials when they are already + * loaded into the object. + * + * @callback callback function(err) + * When this callback is called with no error, it means either credentials + * do not need to be refreshed or refreshed credentials information has + * been loaded into the object (as the `accessKeyId`, `secretAccessKey`, + * and `sessionToken` properties). + * @param err [Error] if an error occurred, this value will be filled + */ + get: function get(callback) { + var self = this; + if (this.needsRefresh()) { + this.refresh(function(err) { + if (!err) self.expired = false; // reset expired flag + if (callback) callback(err); + }); + } else if (callback) { + callback(); + } + }, + + /** + * @!method getPromise() + * Returns a 'thenable' promise. + * Gets the existing credentials, refreshing them if they are not yet loaded + * or have expired. Users should call this method before using {refresh}, + * as this will not attempt to reload credentials when they are already + * loaded into the object. + * + * Two callbacks can be provided to the `then` method on the returned promise. + * The first callback will be called if the promise is fulfilled, and the second + * callback will be called if the promise is rejected. + * @callback fulfilledCallback function() + * Called if the promise is fulfilled. When this callback is called, it + * means either credentials do not need to be refreshed or refreshed + * credentials information has been loaded into the object (as the + * `accessKeyId`, `secretAccessKey`, and `sessionToken` properties). + * @callback rejectedCallback function(err) + * Called if the promise is rejected. + * @param err [Error] if an error occurred, this value will be filled + * @return [Promise] A promise that represents the state of the `get` call. + * @example Calling the `getPromise` method. + * var promise = credProvider.getPromise(); + * promise.then(function() { ... }, function(err) { ... }); + */ + + /** + * @!method refreshPromise() + * Returns a 'thenable' promise. + * Refreshes the credentials. Users should call {get} before attempting + * to forcibly refresh credentials. + * + * Two callbacks can be provided to the `then` method on the returned promise. + * The first callback will be called if the promise is fulfilled, and the second + * callback will be called if the promise is rejected. + * @callback fulfilledCallback function() + * Called if the promise is fulfilled. When this callback is called, it + * means refreshed credentials information has been loaded into the object + * (as the `accessKeyId`, `secretAccessKey`, and `sessionToken` properties). + * @callback rejectedCallback function(err) + * Called if the promise is rejected. + * @param err [Error] if an error occurred, this value will be filled + * @return [Promise] A promise that represents the state of the `refresh` call. + * @example Calling the `refreshPromise` method. + * var promise = credProvider.refreshPromise(); + * promise.then(function() { ... }, function(err) { ... 
}); + */ + + /** + * Refreshes the credentials. Users should call {get} before attempting + * to forcibly refresh credentials. + * + * @callback callback function(err) + * When this callback is called with no error, it means refreshed + * credentials information has been loaded into the object (as the + * `accessKeyId`, `secretAccessKey`, and `sessionToken` properties). + * @param err [Error] if an error occurred, this value will be filled + * @note Subclasses should override this class to reset the + * {accessKeyId}, {secretAccessKey} and optional {sessionToken} + * on the credentials object and then call the callback with + * any error information. + * @see get + */ + refresh: function refresh(callback) { + this.expired = false; + callback(); + }, + + /** + * @api private + * @param callback + */ + coalesceRefresh: function coalesceRefresh(callback, sync) { + var self = this; + if (self.refreshCallbacks.push(callback) === 1) { + self.load(function onLoad(err) { + AWS.util.arrayEach(self.refreshCallbacks, function(callback) { + if (sync) { + callback(err); + } else { + // callback could throw, so defer to ensure all callbacks are notified + AWS.util.defer(function () { + callback(err); + }); + } + }); + self.refreshCallbacks.length = 0; + }); + } + }, + + /** + * @api private + * @param callback + */ + load: function load(callback) { + callback(); + } +}); + +/** + * @api private + */ +AWS.Credentials.addPromisesToClass = function addPromisesToClass(PromiseDependency) { + this.prototype.getPromise = AWS.util.promisifyMethod('get', PromiseDependency); + this.prototype.refreshPromise = AWS.util.promisifyMethod('refresh', PromiseDependency); +}; + +/** + * @api private + */ +AWS.Credentials.deletePromisesFromClass = function deletePromisesFromClass() { + delete this.prototype.getPromise; + delete this.prototype.refreshPromise; +}; + +AWS.util.addPromises(AWS.Credentials); + + +/***/ }), + +/***/ 57083: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var STS = __nccwpck_require__(57513); + +/** + * Represents temporary credentials retrieved from {AWS.STS}. Without any + * extra parameters, credentials will be fetched from the + * {AWS.STS.getSessionToken} operation. If an IAM role is provided, the + * {AWS.STS.assumeRole} operation will be used to fetch credentials for the + * role instead. + * + * AWS.ChainableTemporaryCredentials differs from AWS.TemporaryCredentials in + * the way masterCredentials and refreshes are handled. + * AWS.ChainableTemporaryCredentials refreshes expired credentials using the + * masterCredentials passed by the user to support chaining of STS credentials. + * However, AWS.TemporaryCredentials recursively collapses the masterCredentials + * during instantiation, precluding the ability to refresh credentials which + * require intermediate, temporary credentials. 
+ *
+ * For example, if the application should use RoleA, which must be assumed from
+ * RoleB, and the environment provides credentials which can assume RoleB, then
+ * AWS.ChainableTemporaryCredentials must be used to support refreshing the
+ * temporary credentials for RoleA:
+ *
+ * ```javascript
+ * var roleACreds = new AWS.ChainableTemporaryCredentials({
+ *   params: {RoleArn: 'RoleA'},
+ *   masterCredentials: new AWS.ChainableTemporaryCredentials({
+ *     params: {RoleArn: 'RoleB'},
+ *     masterCredentials: new AWS.EnvironmentCredentials('AWS')
+ *   })
+ * });
+ * ```
+ *
+ * If AWS.TemporaryCredentials had been used in the previous example,
+ * `roleACreds` would fail to refresh because `roleACreds` would
+ * use the environment credentials for the AssumeRole request.
+ *
+ * Another difference is that AWS.ChainableTemporaryCredentials creates the STS
+ * service instance during instantiation while AWS.TemporaryCredentials creates
+ * the STS service instance during the first refresh. Creating the service
+ * instance during instantiation effectively captures the master credentials
+ * from the global config, so that subsequent changes to the global config do
+ * not affect the master credentials used to refresh the temporary credentials.
+ *
+ * This allows an instance of AWS.ChainableTemporaryCredentials to be assigned
+ * to AWS.config.credentials:
+ *
+ * ```javascript
+ * var envCreds = new AWS.EnvironmentCredentials('AWS');
+ * AWS.config.credentials = envCreds;
+ * // masterCredentials will be envCreds
+ * AWS.config.credentials = new AWS.ChainableTemporaryCredentials({
+ *   params: {RoleArn: '...'}
+ * });
+ * ```
+ *
+ * Similarly, to use the CredentialProviderChain's default providers as the
+ * master credentials, simply create a new instance of
+ * AWS.ChainableTemporaryCredentials:
+ *
+ * ```javascript
+ * AWS.config.credentials = new AWS.ChainableTemporaryCredentials({
+ *   params: {RoleArn: '...'}
+ * });
+ * ```
+ *
+ * @!attribute service
+ *   @return [AWS.STS] the STS service instance used to
+ *     get and refresh temporary credentials from AWS STS.
+ * @note (see constructor)
+ */
+AWS.ChainableTemporaryCredentials = AWS.util.inherit(AWS.Credentials, {
+ /**
+ * Creates a new temporary credentials object.
+ *
+ * @param options [map] a set of options
+ * @option options params [map] ({}) a map of options that are passed to the
+ *   {AWS.STS.assumeRole} or {AWS.STS.getSessionToken} operations.
+ *   If a `RoleArn` parameter is passed in, credentials will be based on the
+ *   IAM role. If a `SerialNumber` parameter is passed in, {tokenCodeFn} must
+ *   also be passed in or an error will be thrown.
+ * @option options masterCredentials [AWS.Credentials] the master credentials
+ *   used to get and refresh temporary credentials from AWS STS. By default,
+ *   AWS.config.credentials or AWS.config.credentialProvider will be used.
+ * @option options tokenCodeFn [Function] (null) Function to provide
+ *   `TokenCode`, if `SerialNumber` is provided for the profile in {params}.
+ *   The function is called with the value of `SerialNumber` and a `callback`,
+ *   and should provide the `TokenCode` or an error to the callback in the
+ *   format `callback(err, token)`.
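+ * @example Passing an MFA token code via tokenCodeFn (a rough sketch)
+ *   // 'promptForCode' is a hypothetical function in your application
+ *   // that obtains the current MFA token for the given serial number.
+ *   AWS.config.credentials = new AWS.ChainableTemporaryCredentials({
+ *     params: {
+ *       RoleArn: 'arn:aws:iam::1234567890:role/TemporaryCredentials',
+ *       SerialNumber: 'arn:aws:iam::1234567890:mfa/user'
+ *     },
+ *     tokenCodeFn: function (serialNumber, callback) {
+ *       promptForCode(serialNumber, function (code) {
+ *         callback(null, code);
+ *       });
+ *     }
+ *   });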
+ * @example Creating a new credentials object for generic temporary credentials + * AWS.config.credentials = new AWS.ChainableTemporaryCredentials(); + * @example Creating a new credentials object for an IAM role + * AWS.config.credentials = new AWS.ChainableTemporaryCredentials({ + * params: { + * RoleArn: 'arn:aws:iam::1234567890:role/TemporaryCredentials' + * } + * }); + * @see AWS.STS.assumeRole + * @see AWS.STS.getSessionToken + */ + constructor: function ChainableTemporaryCredentials(options) { + AWS.Credentials.call(this); + options = options || {}; + this.errorCode = 'ChainableTemporaryCredentialsProviderFailure'; + this.expired = true; + this.tokenCodeFn = null; + + var params = AWS.util.copy(options.params) || {}; + if (params.RoleArn) { + params.RoleSessionName = params.RoleSessionName || 'temporary-credentials'; + } + if (params.SerialNumber) { + if (!options.tokenCodeFn || (typeof options.tokenCodeFn !== 'function')) { + throw new AWS.util.error( + new Error('tokenCodeFn must be a function when params.SerialNumber is given'), + {code: this.errorCode} + ); + } else { + this.tokenCodeFn = options.tokenCodeFn; + } + } + var config = AWS.util.merge( + { + params: params, + credentials: options.masterCredentials || AWS.config.credentials + }, + options.stsConfig || {} + ); + this.service = new STS(config); + }, + + /** + * Refreshes credentials using {AWS.STS.assumeRole} or + * {AWS.STS.getSessionToken}, depending on whether an IAM role ARN was passed + * to the credentials {constructor}. + * + * @callback callback function(err) + * Called when the STS service responds (or fails). When + * this callback is called with no error, it means that the credentials + * information has been loaded into the object (as the `accessKeyId`, + * `secretAccessKey`, and `sessionToken` properties). + * @param err [Error] if an error occurred, this value will be filled + * @see AWS.Credentials.get + */ + refresh: function refresh(callback) { + this.coalesceRefresh(callback || AWS.util.fn.callback); + }, + + /** + * @api private + * @param callback + */ + load: function load(callback) { + var self = this; + var operation = self.service.config.params.RoleArn ? 'assumeRole' : 'getSessionToken'; + this.getTokenCode(function (err, tokenCode) { + var params = {}; + if (err) { + callback(err); + return; + } + if (tokenCode) { + params.TokenCode = tokenCode; + } + self.service[operation](params, function (err, data) { + if (!err) { + self.service.credentialsFrom(data, self); + } + callback(err); + }); + }); + }, + + /** + * @api private + */ + getTokenCode: function getTokenCode(callback) { + var self = this; + if (this.tokenCodeFn) { + this.tokenCodeFn(this.service.config.params.SerialNumber, function (err, token) { + if (err) { + var message = err; + if (err instanceof Error) { + message = err.message; + } + callback( + AWS.util.error( + new Error('Error fetching MFA token: ' + message), + { code: self.errorCode} + ) + ); + return; + } + callback(null, token); + }); + } else { + callback(null); + } + } +}); + + +/***/ }), + +/***/ 3498: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var CognitoIdentity = __nccwpck_require__(58291); +var STS = __nccwpck_require__(57513); + +/** + * Represents credentials retrieved from STS Web Identity Federation using + * the Amazon Cognito Identity service. 
+ * + * By default this provider gets credentials using the + * {AWS.CognitoIdentity.getCredentialsForIdentity} service operation, which + * requires either an `IdentityId` or an `IdentityPoolId` (Amazon Cognito + * Identity Pool ID), which is used to call {AWS.CognitoIdentity.getId} to + * obtain an `IdentityId`. If the identity or identity pool is not configured in + * the Amazon Cognito Console to use IAM roles with the appropriate permissions, + * then additionally a `RoleArn` is required containing the ARN of the IAM trust + * policy for the Amazon Cognito role that the user will log into. If a `RoleArn` + * is provided, then this provider gets credentials using the + * {AWS.STS.assumeRoleWithWebIdentity} service operation, after first getting an + * Open ID token from {AWS.CognitoIdentity.getOpenIdToken}. + * + * In addition, if this credential provider is used to provide authenticated + * login, the `Logins` map may be set to the tokens provided by the respective + * identity providers. See {constructor} for an example on creating a credentials + * object with proper property values. + * + * ## Refreshing Credentials from Identity Service + * + * In addition to AWS credentials expiring after a given amount of time, the + * login token from the identity provider will also expire. Once this token + * expires, it will not be usable to refresh AWS credentials, and another + * token will be needed. The SDK does not manage refreshing of the token value, + * but this can be done through a "refresh token" supported by most identity + * providers. Consult the documentation for the identity provider for refreshing + * tokens. Once the refreshed token is acquired, you should make sure to update + * this new token in the credentials object's {params} property. The following + * code will update the WebIdentityToken, assuming you have retrieved an updated + * token from the identity provider: + * + * ```javascript + * AWS.config.credentials.params.Logins['graph.facebook.com'] = updatedToken; + * ``` + * + * Future calls to `credentials.refresh()` will now use the new token. + * + * @!attribute params + * @return [map] the map of params passed to + * {AWS.CognitoIdentity.getId}, + * {AWS.CognitoIdentity.getOpenIdToken}, and + * {AWS.STS.assumeRoleWithWebIdentity}. To update the token, set the + * `params.WebIdentityToken` property. + * @!attribute data + * @return [map] the raw data response from the call to + * {AWS.CognitoIdentity.getCredentialsForIdentity}, or + * {AWS.STS.assumeRoleWithWebIdentity}. Use this if you want to get + * access to other properties from the response. + * @!attribute identityId + * @return [String] the Cognito ID returned by the last call to + * {AWS.CognitoIdentity.getOpenIdToken}. This ID represents the actual + * final resolved identity ID from Amazon Cognito. + */ +AWS.CognitoIdentityCredentials = AWS.util.inherit(AWS.Credentials, { + /** + * @api private + */ + localStorageKey: { + id: 'aws.cognito.identity-id.', + providers: 'aws.cognito.identity-providers.' + }, + + /** + * Creates a new credentials object. 
+ * @example Creating a new credentials object
+ *   AWS.config.credentials = new AWS.CognitoIdentityCredentials({
+ *
+ *     // either IdentityPoolId or IdentityId is required
+ *     // See the IdentityPoolId param for AWS.CognitoIdentity.getID (linked below)
+ *     // See the IdentityId param for AWS.CognitoIdentity.getCredentialsForIdentity
+ *     // or AWS.CognitoIdentity.getOpenIdToken (linked below)
+ *     IdentityPoolId: 'us-east-1:1699ebc0-7900-4099-b910-2df94f52a030',
+ *     IdentityId: 'us-east-1:128d0a74-c82f-4553-916d-90053e4a8b0f',
+ *
+ *     // optional, only necessary when the identity pool is not configured
+ *     // to use IAM roles in the Amazon Cognito Console
+ *     // See the RoleArn param for AWS.STS.assumeRoleWithWebIdentity (linked below)
+ *     RoleArn: 'arn:aws:iam::1234567890:role/MYAPP-CognitoIdentity',
+ *
+ *     // optional tokens, used for authenticated login
+ *     // See the Logins param for AWS.CognitoIdentity.getID (linked below)
+ *     Logins: {
+ *       'graph.facebook.com': 'FBTOKEN',
+ *       'www.amazon.com': 'AMAZONTOKEN',
+ *       'accounts.google.com': 'GOOGLETOKEN',
+ *       'api.twitter.com': 'TWITTERTOKEN',
+ *       'www.digits.com': 'DIGITSTOKEN'
+ *     },
+ *
+ *     // optional name, defaults to web-identity
+ *     // See the RoleSessionName param for AWS.STS.assumeRoleWithWebIdentity (linked below)
+ *     RoleSessionName: 'web',
+ *
+ *     // optional, only necessary when application runs in a browser
+ *     // and multiple users are signed in at once, used for caching
+ *     LoginId: 'example@gmail.com'
+ *
+ *   }, {
+ *     // optionally provide configuration to apply to the underlying service clients
+ *     // if configuration is not provided, then configuration will be pulled from AWS.config
+ *
+ *     // region should match the region your identity pool is located in
+ *     region: 'us-east-1',
+ *
+ *     // specify timeout options
+ *     httpOptions: {
+ *       timeout: 100
+ *     }
+ *   });
+ * @see AWS.CognitoIdentity.getId
+ * @see AWS.CognitoIdentity.getCredentialsForIdentity
+ * @see AWS.STS.assumeRoleWithWebIdentity
+ * @see AWS.CognitoIdentity.getOpenIdToken
+ * @see AWS.Config
+ * @note If a region is not provided in the global AWS.config, or
+ *   specified in the `clientConfig` to the CognitoIdentityCredentials
+ *   constructor, you may encounter a 'Missing credentials in config' error
+ *   when making a service call.
+ */
+ constructor: function CognitoIdentityCredentials(params, clientConfig) {
+   AWS.Credentials.call(this);
+   this.expired = true;
+   this.params = params;
+   this.data = null;
+   this._identityId = null;
+   this._clientConfig = AWS.util.copy(clientConfig || {});
+   this.loadCachedId();
+   var self = this;
+   Object.defineProperty(this, 'identityId', {
+     get: function() {
+       self.loadCachedId();
+       return self._identityId || self.params.IdentityId;
+     },
+     set: function(identityId) {
+       self._identityId = identityId;
+     }
+   });
+ },
+
+ /**
+ * Refreshes credentials using {AWS.CognitoIdentity.getCredentialsForIdentity},
+ * or {AWS.STS.assumeRoleWithWebIdentity}.
+ *
+ * @callback callback function(err)
+ *   Called when the STS service responds (or fails). When
+ *   this callback is called with no error, it means that the credentials
+ *   information has been loaded into the object (as the `accessKeyId`,
+ *   `secretAccessKey`, and `sessionToken` properties).
+ * @param err [Error] if an error occurred, this value will be filled + * @see AWS.Credentials.get + */ + refresh: function refresh(callback) { + this.coalesceRefresh(callback || AWS.util.fn.callback); + }, + + /** + * @api private + * @param callback + */ + load: function load(callback) { + var self = this; + self.createClients(); + self.data = null; + self._identityId = null; + self.getId(function(err) { + if (!err) { + if (!self.params.RoleArn) { + self.getCredentialsForIdentity(callback); + } else { + self.getCredentialsFromSTS(callback); + } + } else { + self.clearIdOnNotAuthorized(err); + callback(err); + } + }); + }, + + /** + * Clears the cached Cognito ID associated with the currently configured + * identity pool ID. Use this to manually invalidate your cache if + * the identity pool ID was deleted. + */ + clearCachedId: function clearCache() { + this._identityId = null; + delete this.params.IdentityId; + + var poolId = this.params.IdentityPoolId; + var loginId = this.params.LoginId || ''; + delete this.storage[this.localStorageKey.id + poolId + loginId]; + delete this.storage[this.localStorageKey.providers + poolId + loginId]; + }, + + /** + * @api private + */ + clearIdOnNotAuthorized: function clearIdOnNotAuthorized(err) { + var self = this; + if (err.code == 'NotAuthorizedException') { + self.clearCachedId(); + } + }, + + /** + * Retrieves a Cognito ID, loading from cache if it was already retrieved + * on this device. + * + * @callback callback function(err, identityId) + * @param err [Error, null] an error object if the call failed or null if + * it succeeded. + * @param identityId [String, null] if successful, the callback will return + * the Cognito ID. + * @note If not loaded explicitly, the Cognito ID is loaded and stored in + * localStorage in the browser environment of a device. 
+ * @api private + */ + getId: function getId(callback) { + var self = this; + if (typeof self.params.IdentityId === 'string') { + return callback(null, self.params.IdentityId); + } + + self.cognito.getId(function(err, data) { + if (!err && data.IdentityId) { + self.params.IdentityId = data.IdentityId; + callback(null, data.IdentityId); + } else { + callback(err); + } + }); + }, + + + /** + * @api private + */ + loadCredentials: function loadCredentials(data, credentials) { + if (!data || !credentials) return; + credentials.expired = false; + credentials.accessKeyId = data.Credentials.AccessKeyId; + credentials.secretAccessKey = data.Credentials.SecretKey; + credentials.sessionToken = data.Credentials.SessionToken; + credentials.expireTime = data.Credentials.Expiration; + }, + + /** + * @api private + */ + getCredentialsForIdentity: function getCredentialsForIdentity(callback) { + var self = this; + self.cognito.getCredentialsForIdentity(function(err, data) { + if (!err) { + self.cacheId(data); + self.data = data; + self.loadCredentials(self.data, self); + } else { + self.clearIdOnNotAuthorized(err); + } + callback(err); + }); + }, + + /** + * @api private + */ + getCredentialsFromSTS: function getCredentialsFromSTS(callback) { + var self = this; + self.cognito.getOpenIdToken(function(err, data) { + if (!err) { + self.cacheId(data); + self.params.WebIdentityToken = data.Token; + self.webIdentityCredentials.refresh(function(webErr) { + if (!webErr) { + self.data = self.webIdentityCredentials.data; + self.sts.credentialsFrom(self.data, self); + } + callback(webErr); + }); + } else { + self.clearIdOnNotAuthorized(err); + callback(err); + } + }); + }, + + /** + * @api private + */ + loadCachedId: function loadCachedId() { + var self = this; + + // in the browser we source default IdentityId from localStorage + if (AWS.util.isBrowser() && !self.params.IdentityId) { + var id = self.getStorage('id'); + if (id && self.params.Logins) { + var actualProviders = Object.keys(self.params.Logins); + var cachedProviders = + (self.getStorage('providers') || '').split(','); + + // only load ID if at least one provider used this ID before + var intersect = cachedProviders.filter(function(n) { + return actualProviders.indexOf(n) !== -1; + }); + if (intersect.length !== 0) { + self.params.IdentityId = id; + } + } else if (id) { + self.params.IdentityId = id; + } + } + }, + + /** + * @api private + */ + createClients: function() { + var clientConfig = this._clientConfig; + this.webIdentityCredentials = this.webIdentityCredentials || + new AWS.WebIdentityCredentials(this.params, clientConfig); + if (!this.cognito) { + var cognitoConfig = AWS.util.merge({}, clientConfig); + cognitoConfig.params = this.params; + this.cognito = new CognitoIdentity(cognitoConfig); + } + this.sts = this.sts || new STS(clientConfig); + }, + + /** + * @api private + */ + cacheId: function cacheId(data) { + this._identityId = data.IdentityId; + this.params.IdentityId = this._identityId; + + // cache this IdentityId in browser localStorage if possible + if (AWS.util.isBrowser()) { + this.setStorage('id', data.IdentityId); + + if (this.params.Logins) { + this.setStorage('providers', Object.keys(this.params.Logins).join(',')); + } + } + }, + + /** + * @api private + */ + getStorage: function getStorage(key) { + return this.storage[this.localStorageKey[key] + this.params.IdentityPoolId + (this.params.LoginId || '')]; + }, + + /** + * @api private + */ + setStorage: function setStorage(key, val) { + try { + 
this.storage[this.localStorageKey[key] + this.params.IdentityPoolId + (this.params.LoginId || '')] = val; + } catch (_) {} + }, + + /** + * @api private + */ + storage: (function() { + try { + var storage = AWS.util.isBrowser() && window.localStorage !== null && typeof window.localStorage === 'object' ? + window.localStorage : {}; + + // Test set/remove which would throw an error in Safari's private browsing + storage['aws.test-storage'] = 'foobar'; + delete storage['aws.test-storage']; + + return storage; + } catch (_) { + return {}; + } + })() +}); + + +/***/ }), + +/***/ 36965: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +/** + * Creates a credential provider chain that searches for AWS credentials + * in a list of credential providers specified by the {providers} property. + * + * By default, the chain will use the {defaultProviders} to resolve credentials. + * These providers will look in the environment using the + * {AWS.EnvironmentCredentials} class with the 'AWS' and 'AMAZON' prefixes. + * + * ## Setting Providers + * + * Each provider in the {providers} list should be a function that returns + * a {AWS.Credentials} object, or a hardcoded credentials object. The function + * form allows for delayed execution of the credential construction. + * + * ## Resolving Credentials from a Chain + * + * Call {resolve} to return the first valid credential object that can be + * loaded by the provider chain. + * + * For example, to resolve a chain with a custom provider that checks a file + * on disk after the set of {defaultProviders}: + * + * ```javascript + * var diskProvider = new AWS.FileSystemCredentials('./creds.json'); + * var chain = new AWS.CredentialProviderChain(); + * chain.providers.push(diskProvider); + * chain.resolve(); + * ``` + * + * The above code will return the `diskProvider` object if the + * file contains credentials and the `defaultProviders` do not contain + * any credential settings. + * + * @!attribute providers + * @return [Array] + * a list of credentials objects or functions that return credentials + * objects. If the provider is a function, the function will be + * executed lazily when the provider needs to be checked for valid + * credentials. By default, this object will be set to the + * {defaultProviders}. + * @see defaultProviders + */ +AWS.CredentialProviderChain = AWS.util.inherit(AWS.Credentials, { + + /** + * Creates a new CredentialProviderChain with a default set of providers + * specified by {defaultProviders}. + */ + constructor: function CredentialProviderChain(providers) { + if (providers) { + this.providers = providers; + } else { + this.providers = AWS.CredentialProviderChain.defaultProviders.slice(0); + } + this.resolveCallbacks = []; + }, + + /** + * @!method resolvePromise() + * Returns a 'thenable' promise. + * Resolves the provider chain by searching for the first set of + * credentials in {providers}. + * + * Two callbacks can be provided to the `then` method on the returned promise. + * The first callback will be called if the promise is fulfilled, and the second + * callback will be called if the promise is rejected. + * @callback fulfilledCallback function(credentials) + * Called if the promise is fulfilled and the provider resolves the chain + * to a credentials object + * @param credentials [AWS.Credentials] the credentials object resolved + * by the provider chain. + * @callback rejectedCallback function(error) + * Called if the promise is rejected. 
+ * @param err [Error] the error object returned if no credentials are found. + * @return [Promise] A promise that represents the state of the `resolve` method call. + * @example Calling the `resolvePromise` method. + * var promise = chain.resolvePromise(); + * promise.then(function(credentials) { ... }, function(err) { ... }); + */ + + /** + * Resolves the provider chain by searching for the first set of + * credentials in {providers}. + * + * @callback callback function(err, credentials) + * Called when the provider resolves the chain to a credentials object + * or null if no credentials can be found. + * + * @param err [Error] the error object returned if no credentials are + * found. + * @param credentials [AWS.Credentials] the credentials object resolved + * by the provider chain. + * @return [AWS.CredentialProviderChain] the provider, for chaining. + */ + resolve: function resolve(callback) { + var self = this; + if (self.providers.length === 0) { + callback(new Error('No providers')); + return self; + } + + if (self.resolveCallbacks.push(callback) === 1) { + var index = 0; + var providers = self.providers.slice(0); + + function resolveNext(err, creds) { + if ((!err && creds) || index === providers.length) { + AWS.util.arrayEach(self.resolveCallbacks, function (callback) { + callback(err, creds); + }); + self.resolveCallbacks.length = 0; + return; + } + + var provider = providers[index++]; + if (typeof provider === 'function') { + creds = provider.call(); + } else { + creds = provider; + } + + if (creds.get) { + creds.get(function (getErr) { + resolveNext(getErr, getErr ? null : creds); + }); + } else { + resolveNext(null, creds); + } + } + + resolveNext(); + } + + return self; + } +}); + +/** + * The default set of providers used by a vanilla CredentialProviderChain. + * + * In the browser: + * + * ```javascript + * AWS.CredentialProviderChain.defaultProviders = [] + * ``` + * + * In Node.js: + * + * ```javascript + * AWS.CredentialProviderChain.defaultProviders = [ + * function () { return new AWS.EnvironmentCredentials('AWS'); }, + * function () { return new AWS.EnvironmentCredentials('AMAZON'); }, + * function () { return new AWS.SsoCredentials(); }, + * function () { return new AWS.SharedIniFileCredentials(); }, + * function () { return new AWS.ECSCredentials(); }, + * function () { return new AWS.ProcessCredentials(); }, + * function () { return new AWS.TokenFileWebIdentityCredentials(); }, + * function () { return new AWS.EC2MetadataCredentials() } + * ] + * ``` + */ +AWS.CredentialProviderChain.defaultProviders = []; + +/** + * @api private + */ +AWS.CredentialProviderChain.addPromisesToClass = function addPromisesToClass(PromiseDependency) { + this.prototype.resolvePromise = AWS.util.promisifyMethod('resolve', PromiseDependency); +}; + +/** + * @api private + */ +AWS.CredentialProviderChain.deletePromisesFromClass = function deletePromisesFromClass() { + delete this.prototype.resolvePromise; +}; + +AWS.util.addPromises(AWS.CredentialProviderChain); + + +/***/ }), + +/***/ 73379: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +__nccwpck_require__(25768); + +/** + * Represents credentials received from the metadata service on an EC2 instance. + * + * By default, this class will connect to the metadata service using + * {AWS.MetadataService} and attempt to load any available credentials. If it + * can connect, and credentials are available, these will be used with zero + * configuration. 
+ *
+ * This credentials class will, by default, time out after 1 second of
+ * inactivity and retry 3 times.
+ * If your requests to the EC2 metadata service are timing out, you can
+ * increase these values by configuring them directly:
+ *
+ * ```javascript
+ * AWS.config.credentials = new AWS.EC2MetadataCredentials({
+ *   httpOptions: { timeout: 5000 }, // 5 second timeout
+ *   maxRetries: 10, // retry 10 times
+ *   retryDelayOptions: { base: 200 }, // see AWS.Config for information
+ *   logger: console, // see AWS.Config for information
+ *   ec2MetadataV1Disabled: false // whether to block IMDS v1 fallback.
+ * });
+ * ```
+ *
+ * If your requests are timing out in connecting to the metadata service, such
+ * as when testing on a development machine, you can use the connectTimeout
+ * option, specified in milliseconds, which also defaults to 1 second.
+ *
+ * If a request fails or returns expired credentials, the SDK will
+ * extend the expiration of the current credentials, with a warning message.
+ * For more information, please go to:
+ * https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html
+ *
+ * @!attribute originalExpiration
+ *   @return [Date] The optional original expiration of the current credential.
+ *     In case of AWS outage, the EC2 metadata will extend expiration of the
+ *     existing credential.
+ *
+ * @see AWS.Config.retryDelayOptions
+ * @see AWS.Config.logger
+ *
+ * @!macro nobrowser
+ */
+AWS.EC2MetadataCredentials = AWS.util.inherit(AWS.Credentials, {
+ constructor: function EC2MetadataCredentials(options) {
+   AWS.Credentials.call(this);
+
+   options = options ? AWS.util.copy(options) : {};
+   options = AWS.util.merge(
+     {maxRetries: this.defaultMaxRetries}, options);
+   if (!options.httpOptions) options.httpOptions = {};
+   options.httpOptions = AWS.util.merge(
+     {timeout: this.defaultTimeout,
+      connectTimeout: this.defaultConnectTimeout},
+     options.httpOptions);
+
+   this.metadataService = new AWS.MetadataService(options);
+   this.logger = options.logger || AWS.config && AWS.config.logger;
+ },
+
+ /**
+ * @api private
+ */
+ defaultTimeout: 1000,
+
+ /**
+ * @api private
+ */
+ defaultConnectTimeout: 1000,
+
+ /**
+ * @api private
+ */
+ defaultMaxRetries: 3,
+
+ /**
+ * The original expiration of the current credential. In case of AWS
+ * outage, the EC2 metadata will extend expiration of the existing
+ * credential.
+ */
+ originalExpiration: undefined,
+
+ /**
+ * Loads the credentials from the instance metadata service
+ *
+ * @callback callback function(err)
+ *   Called when the instance metadata service responds (or fails). When
+ *   this callback is called with no error, it means that the credentials
+ *   information has been loaded into the object (as the `accessKeyId`,
+ *   `secretAccessKey`, and `sessionToken` properties).
+ *   @param err [Error] if an error occurred, this value will be filled
+ * @see get
+ */
+ refresh: function refresh(callback) {
+   this.coalesceRefresh(callback || AWS.util.fn.callback);
+ },
+
+ /**
+ * @api private
+ * @param callback
+ */
+ load: function load(callback) {
+   var self = this;
+   self.metadataService.loadCredentials(function(err, creds) {
+     if (err) {
+       if (self.hasLoadedCredentials()) {
+         self.extendExpirationIfExpired();
+         callback();
+       } else {
+         callback(err);
+       }
+     } else {
+       self.setCredentials(creds);
+       self.extendExpirationIfExpired();
+       callback();
+     }
+   });
+ },
+
+ /**
+ * Whether this credential has been loaded.
+ * @api private
+ */
+ hasLoadedCredentials: function hasLoadedCredentials() {
+   return this.accessKeyId && this.secretAccessKey;
+ },
+
+ /**
+ * If expired, extend the expiration by a base of 15 minutes plus a random
+ * jitter of up to 5 minutes.
+ * @api private
+ */
+ extendExpirationIfExpired: function extendExpirationIfExpired() {
+   if (this.needsRefresh()) {
+     this.originalExpiration = this.originalExpiration || this.expireTime;
+     this.expired = false;
+     var nextTimeout = 15 * 60 + Math.floor(Math.random() * 5 * 60);
+     var currentTime = AWS.util.date.getDate().getTime();
+     this.expireTime = new Date(currentTime + nextTimeout * 1000);
+     // TODO: add doc link;
+     this.logger.warn('Attempting credential expiration extension due to a ' +
+       'credential service availability issue. A refresh of these ' +
+       'credentials will be attempted again at ' + this.expireTime +
+       '\nFor more information, please visit: https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html');
+   }
+ },
+
+ /**
+ * Update the credential with the new credential returned from the EC2
+ * metadata service.
+ * @api private
+ */
+ setCredentials: function setCredentials(creds) {
+   var currentTime = AWS.util.date.getDate().getTime();
+   var expireTime = new Date(creds.Expiration);
+   this.expired = currentTime >= expireTime ? true : false;
+   this.metadata = creds;
+   this.accessKeyId = creds.AccessKeyId;
+   this.secretAccessKey = creds.SecretAccessKey;
+   this.sessionToken = creds.Token;
+   this.expireTime = expireTime;
+ }
+});
+
+
+/***/ }),
+
+/***/ 10645:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+
+/**
+ * Represents credentials received from the relative URI specified in the ECS
+ * container.
+ *
+ * This class will request refreshable credentials from the relative URI
+ * specified by the AWS_CONTAINER_CREDENTIALS_RELATIVE_URI or the
+ * AWS_CONTAINER_CREDENTIALS_FULL_URI environment variable. If valid credentials
+ * are returned in the response, these will be used with zero configuration.
+ *
+ * This credentials class will, by default, time out after 1 second of
+ * inactivity and retry 3 times.
+ * If your requests to the relative URI are timing out, you can increase
+ * these values by configuring them directly:
+ *
+ * ```javascript
+ * AWS.config.credentials = new AWS.ECSCredentials({
+ *   httpOptions: { timeout: 5000 }, // 5 second timeout
+ *   maxRetries: 10, // retry 10 times
+ *   retryDelayOptions: { base: 200 } // see AWS.Config for information
+ * });
+ * ```
+ *
+ * @see AWS.Config.retryDelayOptions
+ *
+ * @!macro nobrowser
+ */
+AWS.ECSCredentials = AWS.RemoteCredentials;
+
+
+/***/ }),
+
+/***/ 57714:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+
+/**
+ * Represents credentials from the environment.
+ *
+ * By default, this class will look for the matching environment variables
+ * prefixed by a given {envPrefix}. The un-prefixed environment variable names
+ * for each credential value are listed below:
+ *
+ * ```javascript
+ * accessKeyId: ACCESS_KEY_ID
+ * secretAccessKey: SECRET_ACCESS_KEY
+ * sessionToken: SESSION_TOKEN
+ * ```
+ *
+ * With the default prefix of 'AWS', the environment variables would be:
+ *
+ *     AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN
+ *
+ * @!attribute envPrefix
+ *   @readonly
+ *   @return [String] the prefix for the environment variable names excluding
+ *     the separating underscore ('_').
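+ * @example Resolving credentials with a custom prefix (a rough sketch)
+ *   // Assumes the process was started with MYAPP_ACCESS_KEY_ID and
+ *   // MYAPP_SECRET_ACCESS_KEY set; 'MYAPP' is a hypothetical prefix.
+ *   var creds = new AWS.EnvironmentCredentials('MYAPP');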
+ */ +AWS.EnvironmentCredentials = AWS.util.inherit(AWS.Credentials, { + + /** + * Creates a new EnvironmentCredentials class with a given variable + * prefix {envPrefix}. For example, to load credentials using the 'AWS' + * prefix: + * + * ```javascript + * var creds = new AWS.EnvironmentCredentials('AWS'); + * creds.accessKeyId == 'AKID' // from AWS_ACCESS_KEY_ID env var + * ``` + * + * @param envPrefix [String] the prefix to use (e.g., 'AWS') for environment + * variables. Do not include the separating underscore. + */ + constructor: function EnvironmentCredentials(envPrefix) { + AWS.Credentials.call(this); + this.envPrefix = envPrefix; + this.get(function() {}); + }, + + /** + * Loads credentials from the environment using the prefixed + * environment variables. + * + * @callback callback function(err) + * Called after the (prefixed) ACCESS_KEY_ID, SECRET_ACCESS_KEY, and + * SESSION_TOKEN environment variables are read. When this callback is + * called with no error, it means that the credentials information has + * been loaded into the object (as the `accessKeyId`, `secretAccessKey`, + * and `sessionToken` properties). + * @param err [Error] if an error occurred, this value will be filled + * @see get + */ + refresh: function refresh(callback) { + if (!callback) callback = AWS.util.fn.callback; + + if (!process || !process.env) { + callback(AWS.util.error( + new Error('No process info or environment variables available'), + { code: 'EnvironmentCredentialsProviderFailure' } + )); + return; + } + + var keys = ['ACCESS_KEY_ID', 'SECRET_ACCESS_KEY', 'SESSION_TOKEN']; + var values = []; + + for (var i = 0; i < keys.length; i++) { + var prefix = ''; + if (this.envPrefix) prefix = this.envPrefix + '_'; + values[i] = process.env[prefix + keys[i]]; + if (!values[i] && keys[i] !== 'SESSION_TOKEN') { + callback(AWS.util.error( + new Error('Variable ' + prefix + keys[i] + ' not set.'), + { code: 'EnvironmentCredentialsProviderFailure' } + )); + return; + } + } + + this.expired = false; + AWS.Credentials.apply(this, values); + callback(); + } + +}); + + +/***/ }), + +/***/ 27454: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +/** + * Represents credentials from a JSON file on disk. + * If the credentials expire, the SDK can {refresh} the credentials + * from the file. + * + * The format of the file should be similar to the options passed to + * {AWS.Config}: + * + * ```javascript + * {accessKeyId: 'akid', secretAccessKey: 'secret', sessionToken: 'optional'} + * ``` + * + * @example Loading credentials from disk + * var creds = new AWS.FileSystemCredentials('./configuration.json'); + * creds.accessKeyId == 'AKID' + * + * @!attribute filename + * @readonly + * @return [String] the path to the JSON file on disk containing the + * credentials. + * @!macro nobrowser + */ +AWS.FileSystemCredentials = AWS.util.inherit(AWS.Credentials, { + + /** + * @overload AWS.FileSystemCredentials(filename) + * Creates a new FileSystemCredentials object from a filename + * + * @param filename [String] the path on disk to the JSON file to load. + */ + constructor: function FileSystemCredentials(filename) { + AWS.Credentials.call(this); + this.filename = filename; + this.get(function() {}); + }, + + /** + * Loads the credentials from the {filename} on disk. + * + * @callback callback function(err) + * Called after the JSON file on disk is read and parsed. 
When this callback
+ * is called with no error, it means that the credentials information
+ * has been loaded into the object (as the `accessKeyId`, `secretAccessKey`,
+ * and `sessionToken` properties).
+ * @param err [Error] if an error occurred, this value will be filled
+ * @see get
+ */
+ refresh: function refresh(callback) {
+ if (!callback) callback = AWS.util.fn.callback;
+ try {
+ var creds = JSON.parse(AWS.util.readFileSync(this.filename));
+ AWS.Credentials.call(this, creds);
+ if (!this.accessKeyId || !this.secretAccessKey) {
+ throw AWS.util.error(
+ new Error('Credentials not set in ' + this.filename),
+ { code: 'FileSystemCredentialsProviderFailure' }
+ );
+ }
+ this.expired = false;
+ callback();
+ } catch (err) {
+ callback(err);
+ }
+ }
+
+});
+
+
+/***/ }),
+
+/***/ 80371:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var proc = __nccwpck_require__(32081);
+var iniLoader = AWS.util.iniLoader;
+
+/**
+ * Represents credentials loaded from shared credentials file
+ * (defaulting to ~/.aws/credentials or defined by the
+ * `AWS_SHARED_CREDENTIALS_FILE` environment variable).
+ *
+ * ## Using process credentials
+ *
+ * The credentials file can specify a credential provider that executes
+ * a given process and attempts to read its stdout to receive a JSON payload
+ * containing the credentials:
+ *
+ * [default]
+ * credential_process = /usr/bin/credential_proc
+ *
+ * Automatically handles refreshing credentials if an Expiration time is
+ * provided in the credentials payload. Credentials supplied in the same profile
+ * will take precedence over the credential_process.
+ *
+ * Sourcing credentials from an external process can potentially be dangerous,
+ * so proceed with caution. Other credential providers should be preferred if
+ * at all possible. If using this option, you should make sure that the shared
+ * credentials file is as locked down as possible using security best practices
+ * for your operating system.
+ *
+ * ## Using custom profiles
+ *
+ * The SDK supports loading credentials for separate profiles. This can be done
+ * in two ways:
+ *
+ * 1. Set the `AWS_PROFILE` environment variable in your process prior to
+ * loading the SDK.
+ * 2. Directly load the AWS.ProcessCredentials provider:
+ *
+ * ```javascript
+ * var creds = new AWS.ProcessCredentials({profile: 'myprofile'});
+ * AWS.config.credentials = creds;
+ * ```
+ *
+ * @!macro nobrowser
+ */
+AWS.ProcessCredentials = AWS.util.inherit(AWS.Credentials, {
+ /**
+ * Creates a new ProcessCredentials object.
+ *
+ * @param options [map] a set of options
+ * @option options profile [String] (AWS_PROFILE env var or 'default')
+ * the name of the profile to load.
+ * @option options filename [String] ('~/.aws/credentials' or defined by
+ * AWS_SHARED_CREDENTIALS_FILE process env var)
+ * the filename to use when loading credentials.
+ * @option options callback [Function] (err) Credentials are eagerly loaded
+ * by the constructor. When the callback is called with no error, the
+ * credentials have been loaded successfully.
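+ *
+ * For example, a minimal sketch (the profile name 'myprofile' and the
+ * callback body are illustrative):
+ *
+ * ```javascript
+ * var creds = new AWS.ProcessCredentials({
+ *   profile: 'myprofile',
+ *   callback: function(err) {
+ *     if (!err) AWS.config.credentials = creds;
+ *   }
+ * });
+ * ```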
+ */ + constructor: function ProcessCredentials(options) { + AWS.Credentials.call(this); + + options = options || {}; + + this.filename = options.filename; + this.profile = options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile; + this.get(options.callback || AWS.util.fn.noop); + }, + + /** + * @api private + */ + load: function load(callback) { + var self = this; + try { + var profiles = AWS.util.getProfilesFromSharedConfig(iniLoader, this.filename); + var profile = profiles[this.profile] || {}; + + if (Object.keys(profile).length === 0) { + throw AWS.util.error( + new Error('Profile ' + this.profile + ' not found'), + { code: 'ProcessCredentialsProviderFailure' } + ); + } + + if (profile['credential_process']) { + this.loadViaCredentialProcess(profile, function(err, data) { + if (err) { + callback(err, null); + } else { + self.expired = false; + self.accessKeyId = data.AccessKeyId; + self.secretAccessKey = data.SecretAccessKey; + self.sessionToken = data.SessionToken; + if (data.Expiration) { + self.expireTime = new Date(data.Expiration); + } + callback(null); + } + }); + } else { + throw AWS.util.error( + new Error('Profile ' + this.profile + ' did not include credential process'), + { code: 'ProcessCredentialsProviderFailure' } + ); + } + } catch (err) { + callback(err); + } + }, + + /** + * Executes the credential_process and retrieves + * credentials from the output + * @api private + * @param profile [map] credentials profile + * @throws ProcessCredentialsProviderFailure + */ + loadViaCredentialProcess: function loadViaCredentialProcess(profile, callback) { + proc.exec(profile['credential_process'], { env: process.env }, function(err, stdOut, stdErr) { + if (err) { + callback(AWS.util.error( + new Error('credential_process returned error'), + { code: 'ProcessCredentialsProviderFailure'} + ), null); + } else { + try { + var credData = JSON.parse(stdOut); + if (credData.Expiration) { + var currentTime = AWS.util.date.getDate(); + var expireTime = new Date(credData.Expiration); + if (expireTime < currentTime) { + throw Error('credential_process returned expired credentials'); + } + } + + if (credData.Version !== 1) { + throw Error('credential_process does not return Version == 1'); + } + callback(null, credData); + } catch (err) { + callback(AWS.util.error( + new Error(err.message), + { code: 'ProcessCredentialsProviderFailure'} + ), null); + } + } + }); + }, + + /** + * Loads the credentials from the credential process + * + * @callback callback function(err) + * Called after the credential process has been executed. When this + * callback is called with no error, it means that the credentials + * information has been loaded into the object (as the `accessKeyId`, + * `secretAccessKey`, and `sessionToken` properties). 
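+ * For reference, the executed credential_process is expected to print a
+ * single JSON document to stdout; the values below are illustrative, and
+ * SessionToken and Expiration may be omitted for non-expiring credentials:
+ *
+ * {"Version": 1, "AccessKeyId": "AKID", "SecretAccessKey": "SECRET",
+ *  "SessionToken": "TOKEN", "Expiration": "2029-01-01T00:00:00Z"}
+ *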
+ * @param err [Error] if an error occurred, this value will be filled + * @see get + */ + refresh: function refresh(callback) { + iniLoader.clearCachedFiles(); + this.coalesceRefresh(callback || AWS.util.fn.callback); + } +}); + + +/***/ }), + +/***/ 88764: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437), + ENV_RELATIVE_URI = 'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI', + ENV_FULL_URI = 'AWS_CONTAINER_CREDENTIALS_FULL_URI', + ENV_AUTH_TOKEN = 'AWS_CONTAINER_AUTHORIZATION_TOKEN', + FULL_URI_UNRESTRICTED_PROTOCOLS = ['https:'], + FULL_URI_ALLOWED_PROTOCOLS = ['http:', 'https:'], + FULL_URI_ALLOWED_HOSTNAMES = ['localhost', '127.0.0.1'], + RELATIVE_URI_HOST = '169.254.170.2'; + +/** + * Represents credentials received from specified URI. + * + * This class will request refreshable credentials from the relative URI + * specified by the AWS_CONTAINER_CREDENTIALS_RELATIVE_URI or the + * AWS_CONTAINER_CREDENTIALS_FULL_URI environment variable. If valid credentials + * are returned in the response, these will be used with zero configuration. + * + * This credentials class will by default timeout after 1 second of inactivity + * and retry 3 times. + * If your requests to the relative URI are timing out, you can increase + * the value by configuring them directly: + * + * ```javascript + * AWS.config.credentials = new AWS.RemoteCredentials({ + * httpOptions: { timeout: 5000 }, // 5 second timeout + * maxRetries: 10, // retry 10 times + * retryDelayOptions: { base: 200 } // see AWS.Config for information + * }); + * ``` + * + * @see AWS.Config.retryDelayOptions + * + * @!macro nobrowser + */ +AWS.RemoteCredentials = AWS.util.inherit(AWS.Credentials, { + constructor: function RemoteCredentials(options) { + AWS.Credentials.call(this); + options = options ? 
AWS.util.copy(options) : {}; + if (!options.httpOptions) options.httpOptions = {}; + options.httpOptions = AWS.util.merge( + this.httpOptions, options.httpOptions); + AWS.util.update(this, options); + }, + + /** + * @api private + */ + httpOptions: { timeout: 1000 }, + + /** + * @api private + */ + maxRetries: 3, + + /** + * @api private + */ + isConfiguredForEcsCredentials: function isConfiguredForEcsCredentials() { + return Boolean( + process && + process.env && + (process.env[ENV_RELATIVE_URI] || process.env[ENV_FULL_URI]) + ); + }, + + /** + * @api private + */ + getECSFullUri: function getECSFullUri() { + if (process && process.env) { + var relative = process.env[ENV_RELATIVE_URI], + full = process.env[ENV_FULL_URI]; + if (relative) { + return 'http://' + RELATIVE_URI_HOST + relative; + } else if (full) { + var parsed = AWS.util.urlParse(full); + if (FULL_URI_ALLOWED_PROTOCOLS.indexOf(parsed.protocol) < 0) { + throw AWS.util.error( + new Error('Unsupported protocol: AWS.RemoteCredentials supports ' + + FULL_URI_ALLOWED_PROTOCOLS.join(',') + ' only; ' + + parsed.protocol + ' requested.'), + { code: 'ECSCredentialsProviderFailure' } + ); + } + + if (FULL_URI_UNRESTRICTED_PROTOCOLS.indexOf(parsed.protocol) < 0 && + FULL_URI_ALLOWED_HOSTNAMES.indexOf(parsed.hostname) < 0) { + throw AWS.util.error( + new Error('Unsupported hostname: AWS.RemoteCredentials only supports ' + + FULL_URI_ALLOWED_HOSTNAMES.join(',') + ' for ' + parsed.protocol + '; ' + + parsed.protocol + '//' + parsed.hostname + ' requested.'), + { code: 'ECSCredentialsProviderFailure' } + ); + } + + return full; + } else { + throw AWS.util.error( + new Error('Variable ' + ENV_RELATIVE_URI + ' or ' + ENV_FULL_URI + + ' must be set to use AWS.RemoteCredentials.'), + { code: 'ECSCredentialsProviderFailure' } + ); + } + } else { + throw AWS.util.error( + new Error('No process info available'), + { code: 'ECSCredentialsProviderFailure' } + ); + } + }, + + /** + * @api private + */ + getECSAuthToken: function getECSAuthToken() { + if (process && process.env && process.env[ENV_FULL_URI]) { + return process.env[ENV_AUTH_TOKEN]; + } + }, + + /** + * @api private + */ + credsFormatIsValid: function credsFormatIsValid(credData) { + return (!!credData.accessKeyId && !!credData.secretAccessKey && + !!credData.sessionToken && !!credData.expireTime); + }, + + /** + * @api private + */ + formatCreds: function formatCreds(credData) { + if (!!credData.credentials) { + credData = credData.credentials; + } + + return { + expired: false, + accessKeyId: credData.accessKeyId || credData.AccessKeyId, + secretAccessKey: credData.secretAccessKey || credData.SecretAccessKey, + sessionToken: credData.sessionToken || credData.Token, + expireTime: new Date(credData.expiration || credData.Expiration) + }; + }, + + /** + * @api private + */ + request: function request(url, callback) { + var httpRequest = new AWS.HttpRequest(url); + httpRequest.method = 'GET'; + httpRequest.headers.Accept = 'application/json'; + var token = this.getECSAuthToken(); + if (token) { + httpRequest.headers.Authorization = token; + } + AWS.util.handleRequestWithRetries(httpRequest, this, callback); + }, + + /** + * Loads the credentials from the relative URI specified by container + * + * @callback callback function(err) + * Called when the request to the relative URI responds (or fails). 
When + * this callback is called with no error, it means that the credentials + * information has been loaded into the object (as the `accessKeyId`, + * `secretAccessKey`, `sessionToken`, and `expireTime` properties). + * @param err [Error] if an error occurred, this value will be filled + * @see get + */ + refresh: function refresh(callback) { + this.coalesceRefresh(callback || AWS.util.fn.callback); + }, + + /** + * @api private + */ + load: function load(callback) { + var self = this; + var fullUri; + + try { + fullUri = this.getECSFullUri(); + } catch (err) { + callback(err); + return; + } + + this.request(fullUri, function(err, data) { + if (!err) { + try { + data = JSON.parse(data); + var creds = self.formatCreds(data); + if (!self.credsFormatIsValid(creds)) { + throw AWS.util.error( + new Error('Response data is not in valid format'), + { code: 'ECSCredentialsProviderFailure' } + ); + } + AWS.util.update(self, creds); + } catch (dataError) { + err = dataError; + } + } + callback(err, creds); + }); + } +}); + + +/***/ }), + +/***/ 15037: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var STS = __nccwpck_require__(57513); + +/** + * Represents credentials retrieved from STS SAML support. + * + * By default this provider gets credentials using the + * {AWS.STS.assumeRoleWithSAML} service operation. This operation + * requires a `RoleArn` containing the ARN of the IAM trust policy for the + * application for which credentials will be given, as well as a `PrincipalArn` + * representing the ARN for the SAML identity provider. In addition, the + * `SAMLAssertion` must be set to the token provided by the identity + * provider. See {constructor} for an example on creating a credentials + * object with proper `RoleArn`, `PrincipalArn`, and `SAMLAssertion` values. + * + * ## Refreshing Credentials from Identity Service + * + * In addition to AWS credentials expiring after a given amount of time, the + * login token from the identity provider will also expire. Once this token + * expires, it will not be usable to refresh AWS credentials, and another + * token will be needed. The SDK does not manage refreshing of the token value, + * but this can be done through a "refresh token" supported by most identity + * providers. Consult the documentation for the identity provider for refreshing + * tokens. Once the refreshed token is acquired, you should make sure to update + * this new token in the credentials object's {params} property. The following + * code will update the SAMLAssertion, assuming you have retrieved an updated + * token from the identity provider: + * + * ```javascript + * AWS.config.credentials.params.SAMLAssertion = updatedToken; + * ``` + * + * Future calls to `credentials.refresh()` will now use the new token. + * + * @!attribute params + * @return [map] the map of params passed to + * {AWS.STS.assumeRoleWithSAML}. To update the token, set the + * `params.SAMLAssertion` property. + */ +AWS.SAMLCredentials = AWS.util.inherit(AWS.Credentials, { + /** + * Creates a new credentials object. 
+ * @param (see AWS.STS.assumeRoleWithSAML) + * @example Creating a new credentials object + * AWS.config.credentials = new AWS.SAMLCredentials({ + * RoleArn: 'arn:aws:iam::1234567890:role/SAMLRole', + * PrincipalArn: 'arn:aws:iam::1234567890:role/SAMLPrincipal', + * SAMLAssertion: 'base64-token', // base64-encoded token from IdP + * }); + * @see AWS.STS.assumeRoleWithSAML + */ + constructor: function SAMLCredentials(params) { + AWS.Credentials.call(this); + this.expired = true; + this.params = params; + }, + + /** + * Refreshes credentials using {AWS.STS.assumeRoleWithSAML} + * + * @callback callback function(err) + * Called when the STS service responds (or fails). When + * this callback is called with no error, it means that the credentials + * information has been loaded into the object (as the `accessKeyId`, + * `secretAccessKey`, and `sessionToken` properties). + * @param err [Error] if an error occurred, this value will be filled + * @see get + */ + refresh: function refresh(callback) { + this.coalesceRefresh(callback || AWS.util.fn.callback); + }, + + /** + * @api private + */ + load: function load(callback) { + var self = this; + self.createClients(); + self.service.assumeRoleWithSAML(function (err, data) { + if (!err) { + self.service.credentialsFrom(data, self); + } + callback(err); + }); + }, + + /** + * @api private + */ + createClients: function() { + this.service = this.service || new STS({params: this.params}); + } + +}); + + +/***/ }), + +/***/ 13754: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var STS = __nccwpck_require__(57513); +var iniLoader = AWS.util.iniLoader; + +var ASSUME_ROLE_DEFAULT_REGION = 'us-east-1'; + +/** + * Represents credentials loaded from shared credentials file + * (defaulting to ~/.aws/credentials or defined by the + * `AWS_SHARED_CREDENTIALS_FILE` environment variable). + * + * ## Using the shared credentials file + * + * This provider is checked by default in the Node.js environment. To use the + * credentials file provider, simply add your access and secret keys to the + * ~/.aws/credentials file in the following format: + * + * [default] + * aws_access_key_id = AKID... + * aws_secret_access_key = YOUR_SECRET_KEY + * + * ## Using custom profiles + * + * The SDK supports loading credentials for separate profiles. This can be done + * in two ways: + * + * 1. Set the `AWS_PROFILE` environment variable in your process prior to + * loading the SDK. + * 2. Directly load the AWS.SharedIniFileCredentials provider: + * + * ```javascript + * var creds = new AWS.SharedIniFileCredentials({profile: 'myprofile'}); + * AWS.config.credentials = creds; + * ``` + * + * @!macro nobrowser + */ +AWS.SharedIniFileCredentials = AWS.util.inherit(AWS.Credentials, { + /** + * Creates a new SharedIniFileCredentials object. + * + * @param options [map] a set of options + * @option options profile [String] (AWS_PROFILE env var or 'default') + * the name of the profile to load. + * @option options filename [String] ('~/.aws/credentials' or defined by + * AWS_SHARED_CREDENTIALS_FILE process env var) + * the filename to use when loading credentials. + * @option options disableAssumeRole [Boolean] (false) True to disable + * support for profiles that assume an IAM role. If true, and an assume + * role profile is selected, an error is raised. + * @option options preferStaticCredentials [Boolean] (false) True to + * prefer static credentials to role_arn if both are present. 
+ * @option options tokenCodeFn [Function] (null) Function to provide + * STS Assume Role TokenCode, if mfa_serial is provided for profile in ini + * file. Function is called with value of mfa_serial and callback, and + * should provide the TokenCode or an error to the callback in the format + * callback(err, token) + * @option options callback [Function] (err) Credentials are eagerly loaded + * by the constructor. When the callback is called with no error, the + * credentials have been loaded successfully. + * @option options httpOptions [map] A set of options to pass to the low-level + * HTTP request. Currently supported options are: + * * **proxy** [String] — the URL to proxy requests through + * * **agent** [http.Agent, https.Agent] — the Agent object to perform + * HTTP requests with. Used for connection pooling. Defaults to the global + * agent (`http.globalAgent`) for non-SSL connections. Note that for + * SSL connections, a special Agent object is used in order to enable + * peer certificate verification. This feature is only available in the + * Node.js environment. + * * **connectTimeout** [Integer] — Sets the socket to timeout after + * failing to establish a connection with the server after + * `connectTimeout` milliseconds. This timeout has no effect once a socket + * connection has been established. + * * **timeout** [Integer] — The number of milliseconds a request can + * take before automatically being terminated. + * Defaults to two minutes (120000). + */ + constructor: function SharedIniFileCredentials(options) { + AWS.Credentials.call(this); + + options = options || {}; + + this.filename = options.filename; + this.profile = options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile; + this.disableAssumeRole = Boolean(options.disableAssumeRole); + this.preferStaticCredentials = Boolean(options.preferStaticCredentials); + this.tokenCodeFn = options.tokenCodeFn || null; + this.httpOptions = options.httpOptions || null; + this.get(options.callback || AWS.util.fn.noop); + }, + + /** + * @api private + */ + load: function load(callback) { + var self = this; + try { + var profiles = AWS.util.getProfilesFromSharedConfig(iniLoader, this.filename); + var profile = profiles[this.profile] || {}; + + if (Object.keys(profile).length === 0) { + throw AWS.util.error( + new Error('Profile ' + this.profile + ' not found'), + { code: 'SharedIniFileCredentialsProviderFailure' } + ); + } + + /* + In the CLI, the presence of both a role_arn and static credentials have + different meanings depending on how many profiles have been visited. For + the first profile processed, role_arn takes precedence over any static + credentials, but for all subsequent profiles, static credentials are + used if present, and only in their absence will the profile's + source_profile and role_arn keys be used to load another set of + credentials. This var is intended to yield compatible behaviour in this + sdk. 
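+
+ As an illustration (profile names and values here are hypothetical): given
+
+ [profile-a]
+ role_arn = arn:aws:iam::123456789012:role/example
+ source_profile = profile-b
+ aws_access_key_id = AKID_A
+ aws_secret_access_key = SECRET_A
+
+ the role_arn wins when profile-a is the profile being loaded directly,
+ while the static keys win when profile-a is visited as a source profile,
+ because source profiles are loaded with preferStaticCredentials set to
+ true (see loadRoleProfile below).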
+ */ + var preferStaticCredentialsToRoleArn = Boolean( + this.preferStaticCredentials + && profile['aws_access_key_id'] + && profile['aws_secret_access_key'] + ); + + if (profile['role_arn'] && !preferStaticCredentialsToRoleArn) { + this.loadRoleProfile(profiles, profile, function(err, data) { + if (err) { + callback(err); + } else { + self.expired = false; + self.accessKeyId = data.Credentials.AccessKeyId; + self.secretAccessKey = data.Credentials.SecretAccessKey; + self.sessionToken = data.Credentials.SessionToken; + self.expireTime = data.Credentials.Expiration; + callback(null); + } + }); + return; + } + + this.accessKeyId = profile['aws_access_key_id']; + this.secretAccessKey = profile['aws_secret_access_key']; + this.sessionToken = profile['aws_session_token']; + + if (!this.accessKeyId || !this.secretAccessKey) { + throw AWS.util.error( + new Error('Credentials not set for profile ' + this.profile), + { code: 'SharedIniFileCredentialsProviderFailure' } + ); + } + this.expired = false; + callback(null); + } catch (err) { + callback(err); + } + }, + + /** + * Loads the credentials from the shared credentials file + * + * @callback callback function(err) + * Called after the shared INI file on disk is read and parsed. When this + * callback is called with no error, it means that the credentials + * information has been loaded into the object (as the `accessKeyId`, + * `secretAccessKey`, and `sessionToken` properties). + * @param err [Error] if an error occurred, this value will be filled + * @see get + */ + refresh: function refresh(callback) { + iniLoader.clearCachedFiles(); + this.coalesceRefresh( + callback || AWS.util.fn.callback, + this.disableAssumeRole + ); + }, + + /** + * @api private + */ + loadRoleProfile: function loadRoleProfile(creds, roleProfile, callback) { + if (this.disableAssumeRole) { + throw AWS.util.error( + new Error('Role assumption profiles are disabled. ' + + 'Failed to load profile ' + this.profile + + ' from ' + creds.filename), + { code: 'SharedIniFileCredentialsProviderFailure' } + ); + } + + var self = this; + var roleArn = roleProfile['role_arn']; + var roleSessionName = roleProfile['role_session_name']; + var externalId = roleProfile['external_id']; + var mfaSerial = roleProfile['mfa_serial']; + var sourceProfileName = roleProfile['source_profile']; + var durationSeconds = parseInt(roleProfile['duration_seconds'], 10) || undefined; + + // From experimentation, the following behavior mimics the AWS CLI: + // + // 1. Use region from the profile if present. + // 2. Otherwise fall back to N. Virginia (global endpoint). + // + // It is necessary to do the fallback explicitly, because if + // 'AWS_STS_REGIONAL_ENDPOINTS=regional', the underlying STS client will + // otherwise throw an error if region is left 'undefined'. + // + // Experimentation shows that the AWS CLI (tested at version 1.18.136) + // ignores the following potential sources of a region for the purposes of + // this AssumeRole call: + // + // - The [default] profile + // - The AWS_REGION environment variable + // + // Ignoring the [default] profile for the purposes of AssumeRole is arguably + // a bug in the CLI since it does use the [default] region for service + // calls... but right now we're matching behavior of the other tool. 
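+ // For example (hypothetical values): a profile containing
+ //   region = eu-west-1
+ // signs the AssumeRole call against eu-west-1, while a profile with no
+ // region key falls back to us-east-1, the region of the global endpoint.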
+ var profileRegion = roleProfile['region'] || ASSUME_ROLE_DEFAULT_REGION; + + if (!sourceProfileName) { + throw AWS.util.error( + new Error('source_profile is not set using profile ' + this.profile), + { code: 'SharedIniFileCredentialsProviderFailure' } + ); + } + + var sourceProfileExistanceTest = creds[sourceProfileName]; + + if (typeof sourceProfileExistanceTest !== 'object') { + throw AWS.util.error( + new Error('source_profile ' + sourceProfileName + ' using profile ' + + this.profile + ' does not exist'), + { code: 'SharedIniFileCredentialsProviderFailure' } + ); + } + + var sourceCredentials = new AWS.SharedIniFileCredentials( + AWS.util.merge(this.options || {}, { + profile: sourceProfileName, + preferStaticCredentials: true + }) + ); + + this.roleArn = roleArn; + var sts = new STS({ + credentials: sourceCredentials, + region: profileRegion, + httpOptions: this.httpOptions + }); + + var roleParams = { + DurationSeconds: durationSeconds, + RoleArn: roleArn, + RoleSessionName: roleSessionName || 'aws-sdk-js-' + Date.now() + }; + + if (externalId) { + roleParams.ExternalId = externalId; + } + + if (mfaSerial && self.tokenCodeFn) { + roleParams.SerialNumber = mfaSerial; + self.tokenCodeFn(mfaSerial, function(err, token) { + if (err) { + var message; + if (err instanceof Error) { + message = err.message; + } else { + message = err; + } + callback( + AWS.util.error( + new Error('Error fetching MFA token: ' + message), + { code: 'SharedIniFileCredentialsProviderFailure' } + )); + return; + } + + roleParams.TokenCode = token; + sts.assumeRole(roleParams, callback); + }); + return; + } + sts.assumeRole(roleParams, callback); + } +}); + + +/***/ }), + +/***/ 68335: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var path = __nccwpck_require__(71017); +var crypto = __nccwpck_require__(6113); +var iniLoader = AWS.util.iniLoader; + +/** + * Represents credentials from sso.getRoleCredentials API for + * `sso_*` values defined in shared credentials file. + * + * ## Using SSO credentials + * + * The credentials file must specify the information below to use sso: + * + * [profile sso-profile] + * sso_account_id = 012345678901 + * sso_region = **-****-* + * sso_role_name = SampleRole + * sso_start_url = https://d-******.awsapps.com/start + * + * or using the session format: + * + * [profile sso-token] + * sso_session = prod + * sso_account_id = 012345678901 + * sso_role_name = SampleRole + * + * [sso-session prod] + * sso_region = **-****-* + * sso_start_url = https://d-******.awsapps.com/start + * + * This information will be automatically added to your shared credentials file by running + * `aws configure sso`. + * + * ## Using custom profiles + * + * The SDK supports loading credentials for separate profiles. This can be done + * in two ways: + * + * 1. Set the `AWS_PROFILE` environment variable in your process prior to + * loading the SDK. + * 2. Directly load the AWS.SsoCredentials provider: + * + * ```javascript + * var creds = new AWS.SsoCredentials({profile: 'myprofile'}); + * AWS.config.credentials = creds; + * ``` + * + * @!macro nobrowser + */ +AWS.SsoCredentials = AWS.util.inherit(AWS.Credentials, { + /** + * Creates a new SsoCredentials object. + * + * @param options [map] a set of options + * @option options profile [String] (AWS_PROFILE env var or 'default') + * the name of the profile to load. 
+ * @option options filename [String] ('~/.aws/credentials' or defined by + * AWS_SHARED_CREDENTIALS_FILE process env var) + * the filename to use when loading credentials. + * @option options callback [Function] (err) Credentials are eagerly loaded + * by the constructor. When the callback is called with no error, the + * credentials have been loaded successfully. + */ + constructor: function SsoCredentials(options) { + AWS.Credentials.call(this); + + options = options || {}; + this.errorCode = 'SsoCredentialsProviderFailure'; + this.expired = true; + + this.filename = options.filename; + this.profile = options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile; + this.service = options.ssoClient; + this.httpOptions = options.httpOptions || null; + this.get(options.callback || AWS.util.fn.noop); + }, + + /** + * @api private + */ + load: function load(callback) { + var self = this; + + try { + var profiles = AWS.util.getProfilesFromSharedConfig(iniLoader, this.filename); + var profile = profiles[this.profile] || {}; + + if (Object.keys(profile).length === 0) { + throw AWS.util.error( + new Error('Profile ' + this.profile + ' not found'), + { code: self.errorCode } + ); + } + + if (profile.sso_session) { + if (!profile.sso_account_id || !profile.sso_role_name) { + throw AWS.util.error( + new Error('Profile ' + this.profile + ' with session ' + profile.sso_session + + ' does not have valid SSO credentials. Required parameters "sso_account_id", "sso_session", ' + + '"sso_role_name". Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html'), + { code: self.errorCode } + ); + } + } else { + if (!profile.sso_start_url || !profile.sso_account_id || !profile.sso_region || !profile.sso_role_name) { + throw AWS.util.error( + new Error('Profile ' + this.profile + ' does not have valid SSO credentials. Required parameters "sso_account_id", "sso_region", ' + + '"sso_role_name", "sso_start_url". Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html'), + { code: self.errorCode } + ); + } + } + + this.getToken(this.profile, profile, function (err, token) { + if (err) { + return callback(err); + } + var request = { + accessToken: token, + accountId: profile.sso_account_id, + roleName: profile.sso_role_name, + }; + + if (!self.service || self.service.config.region !== profile.sso_region) { + self.service = new AWS.SSO({ + region: profile.sso_region, + httpOptions: self.httpOptions, + }); + } + + self.service.getRoleCredentials(request, function(err, data) { + if (err || !data || !data.roleCredentials) { + callback(AWS.util.error( + err || new Error('Please log in using "aws sso login"'), + { code: self.errorCode } + ), null); + } else if (!data.roleCredentials.accessKeyId || !data.roleCredentials.secretAccessKey || !data.roleCredentials.sessionToken || !data.roleCredentials.expiration) { + throw AWS.util.error(new Error( + 'SSO returns an invalid temporary credential.' + )); + } else { + self.expired = false; + self.accessKeyId = data.roleCredentials.accessKeyId; + self.secretAccessKey = data.roleCredentials.secretAccessKey; + self.sessionToken = data.roleCredentials.sessionToken; + self.expireTime = new Date(data.roleCredentials.expiration); + callback(null); + } + }); + }); + } catch (err) { + callback(err); + } + }, + + /** + * @private + * Uses legacy file system retrieval or if sso-session is set, + * use the SSOTokenProvider. + * + * @param {string} profileName - name of the profile. 
+ * @param {object} profile - profile data containing sso_session or sso_start_url etc.
+ * @param {function} callback - called with (err, (string) token).
+ *
+ * @returns {void}
+ */
+ getToken: function getToken(profileName, profile, callback) {
+ var self = this;
+
+ if (profile.sso_session) {
+ var _iniLoader = AWS.util.iniLoader;
+ var ssoSessions = _iniLoader.loadSsoSessionsFrom();
+ var ssoSession = ssoSessions[profile.sso_session];
+ Object.assign(profile, ssoSession);
+
+ var ssoTokenProvider = new AWS.SSOTokenProvider({
+ profile: profileName,
+ });
+ ssoTokenProvider.load(function (err) {
+ if (err) {
+ return callback(err);
+ }
+ return callback(null, ssoTokenProvider.token);
+ });
+ return;
+ }
+
+ try {
+ /**
+ * Treat the SSO token as expired 15 minutes before the expiration date
+ * recorded in the token, since the server side may invalidate the token
+ * before that date.
+ */
+ var EXPIRE_WINDOW_MS = 15 * 60 * 1000;
+ var hasher = crypto.createHash('sha1');
+ var fileName = hasher.update(profile.sso_start_url).digest('hex') + '.json';
+ var cachePath = path.join(
+ iniLoader.getHomeDir(),
+ '.aws',
+ 'sso',
+ 'cache',
+ fileName
+ );
+ var cacheFile = AWS.util.readFileSync(cachePath);
+ var cacheContent = null;
+ if (cacheFile) {
+ cacheContent = JSON.parse(cacheFile);
+ }
+ if (!cacheContent) {
+ throw AWS.util.error(
+ new Error('Cached credentials not found under ' + this.profile + ' profile. Please make sure you log in with aws sso login first'),
+ { code: self.errorCode }
+ );
+ }
+
+ if (!cacheContent.startUrl || !cacheContent.region || !cacheContent.accessToken || !cacheContent.expiresAt) {
+ throw AWS.util.error(
+ new Error('Cached credentials are missing required properties. Try running aws sso login.')
+ );
+ }
+
+ if (new Date(cacheContent.expiresAt).getTime() - Date.now() <= EXPIRE_WINDOW_MS) {
+ throw AWS.util.error(new Error(
+ 'The SSO session associated with this profile has expired. To refresh this SSO session run aws sso login with the corresponding profile.'
+ ));
+ }
+
+ return callback(null, cacheContent.accessToken);
+ } catch (err) {
+ return callback(err, null);
+ }
+ },
+
+ /**
+ * Loads the credentials from the AWS SSO process
+ *
+ * @callback callback function(err)
+ * Called after the AWS SSO process has been executed. When this
+ * callback is called with no error, it means that the credentials
+ * information has been loaded into the object (as the `accessKeyId`,
+ * `secretAccessKey`, and `sessionToken` properties).
+ * @param err [Error] if an error occurred, this value will be filled
+ * @see get
+ */
+ refresh: function refresh(callback) {
+ iniLoader.clearCachedFiles();
+ this.coalesceRefresh(callback || AWS.util.fn.callback);
+ },
+});
+
+
+/***/ }),
+
+/***/ 77360:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var STS = __nccwpck_require__(57513);
+
+/**
+ * Represents temporary credentials retrieved from {AWS.STS}. Without any
+ * extra parameters, credentials will be fetched from the
+ * {AWS.STS.getSessionToken} operation. If an IAM role is provided, the
+ * {AWS.STS.assumeRole} operation will be used to fetch credentials for the
+ * role instead.
+ *
+ * @note AWS.TemporaryCredentials is deprecated, but remains available for
+ * backwards compatibility. {AWS.ChainableTemporaryCredentials} is the
+ * preferred class for temporary credentials.
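+ *
+ * For new code, a sketch along these lines is generally preferable (the
+ * RoleArn value is illustrative):
+ *
+ * ```javascript
+ * AWS.config.credentials = new AWS.ChainableTemporaryCredentials({
+ *   params: {RoleArn: 'arn:aws:iam::123456789012:role/Example'}
+ * });
+ * ```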
+ *
+ * To set up temporary credentials, configure a set of master credentials
+ * using the standard credentials providers (environment, EC2 instance metadata,
+ * or from the filesystem), then set the global credentials to a new
+ * temporary credentials object:
+ *
+ * ```javascript
+ * // Note that environment credentials are loaded by default,
+ * // the following line is shown for clarity:
+ * AWS.config.credentials = new AWS.EnvironmentCredentials('AWS');
+ *
+ * // Now set temporary credentials seeded from the master credentials
+ * AWS.config.credentials = new AWS.TemporaryCredentials();
+ *
+ * // subsequent requests will now use temporary credentials from AWS STS.
+ * new AWS.S3().listBuckets(function(err, data) { ... });
+ * ```
+ *
+ * @!attribute masterCredentials
+ * @return [AWS.Credentials] the master (non-temporary) credentials used to
+ * get and refresh temporary credentials from AWS STS.
+ * @note (see constructor)
+ */
+AWS.TemporaryCredentials = AWS.util.inherit(AWS.Credentials, {
+ /**
+ * Creates a new temporary credentials object.
+ *
+ * @note In order to create temporary credentials, you first need to have
+ * "master" credentials configured in {AWS.Config.credentials}. These
+ * master credentials are necessary to retrieve the temporary credentials,
+ * as well as refresh the credentials when they expire.
+ * @param params [map] a map of options that are passed to the
+ * {AWS.STS.assumeRole} or {AWS.STS.getSessionToken} operations.
+ * If a `RoleArn` parameter is passed in, credentials will be based on the
+ * IAM role.
+ * @param masterCredentials [AWS.Credentials] the master (non-temporary) credentials
+ * used to get and refresh temporary credentials from AWS STS.
+ * @example Creating a new credentials object for generic temporary credentials
+ * AWS.config.credentials = new AWS.TemporaryCredentials();
+ * @example Creating a new credentials object for an IAM role
+ * AWS.config.credentials = new AWS.TemporaryCredentials({
+ * RoleArn: 'arn:aws:iam::1234567890:role/TemporaryCredentials',
+ * });
+ * @see AWS.STS.assumeRole
+ * @see AWS.STS.getSessionToken
+ */
+ constructor: function TemporaryCredentials(params, masterCredentials) {
+ AWS.Credentials.call(this);
+ this.loadMasterCredentials(masterCredentials);
+ this.expired = true;
+
+ this.params = params || {};
+ if (this.params.RoleArn) {
+ this.params.RoleSessionName =
+ this.params.RoleSessionName || 'temporary-credentials';
+ }
+ },
+
+ /**
+ * Refreshes credentials using {AWS.STS.assumeRole} or
+ * {AWS.STS.getSessionToken}, depending on whether an IAM role ARN was passed
+ * to the credentials {constructor}.
+ *
+ * @callback callback function(err)
+ * Called when the STS service responds (or fails). When
+ * this callback is called with no error, it means that the credentials
+ * information has been loaded into the object (as the `accessKeyId`,
+ * `secretAccessKey`, and `sessionToken` properties).
+ * @param err [Error] if an error occurred, this value will be filled
+ * @see get
+ */
+ refresh: function refresh (callback) {
+ this.coalesceRefresh(callback || AWS.util.fn.callback);
+ },
+
+ /**
+ * @api private
+ */
+ load: function load (callback) {
+ var self = this;
+ self.createClients();
+ self.masterCredentials.get(function () {
+ self.service.config.credentials = self.masterCredentials;
+ var operation = self.params.RoleArn ?
+ self.service.assumeRole : self.service.getSessionToken; + operation.call(self.service, function (err, data) { + if (!err) { + self.service.credentialsFrom(data, self); + } + callback(err); + }); + }); + }, + + /** + * @api private + */ + loadMasterCredentials: function loadMasterCredentials (masterCredentials) { + this.masterCredentials = masterCredentials || AWS.config.credentials; + while (this.masterCredentials.masterCredentials) { + this.masterCredentials = this.masterCredentials.masterCredentials; + } + + if (typeof this.masterCredentials.get !== 'function') { + this.masterCredentials = new AWS.Credentials(this.masterCredentials); + } + }, + + /** + * @api private + */ + createClients: function () { + this.service = this.service || new STS({params: this.params}); + } + +}); + + +/***/ }), + +/***/ 11017: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var fs = __nccwpck_require__(57147); +var STS = __nccwpck_require__(57513); +var iniLoader = AWS.util.iniLoader; + +/** + * Represents OIDC credentials from a file on disk + * If the credentials expire, the SDK can {refresh} the credentials + * from the file. + * + * ## Using the web identity token file + * + * This provider is checked by default in the Node.js environment. To use + * the provider simply add your OIDC token to a file (ASCII encoding) and + * share the filename in either AWS_WEB_IDENTITY_TOKEN_FILE environment + * variable or web_identity_token_file shared config variable + * + * The file contains encoded OIDC token and the characters are + * ASCII encoded. OIDC tokens are JSON Web Tokens (JWT). + * JWT's are 3 base64 encoded strings joined by the '.' character. + * + * This class will read filename from AWS_WEB_IDENTITY_TOKEN_FILE + * environment variable or web_identity_token_file shared config variable, + * and get the OIDC token from filename. + * It will also read IAM role to be assumed from AWS_ROLE_ARN + * environment variable or role_arn shared config variable. 
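+ *
+ * For example, the shared config file might contain the following
+ * (illustrative values):
+ *
+ * [default]
+ * web_identity_token_file = /path/to/oidc/token
+ * role_arn = arn:aws:iam::123456789012:role/Example
+ *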
+ * This provider gets credentials using the {AWS.STS.assumeRoleWithWebIdentity}
+ * service operation.
+ *
+ * @!macro nobrowser
+ */
+AWS.TokenFileWebIdentityCredentials = AWS.util.inherit(AWS.Credentials, {
+
+ /**
+ * @example Creating a new credentials object
+ * AWS.config.credentials = new AWS.TokenFileWebIdentityCredentials(
+ * // optionally provide configuration to apply to the underlying AWS.STS service client
+ * // if configuration is not provided, then configuration will be pulled from AWS.config
+ * {
+ * // specify timeout options
+ * httpOptions: {
+ * timeout: 100
+ * }
+ * });
+ * @see AWS.Config
+ */
+ constructor: function TokenFileWebIdentityCredentials(clientConfig) {
+ AWS.Credentials.call(this);
+ this.data = null;
+ this.clientConfig = AWS.util.copy(clientConfig || {});
+ },
+
+ /**
+ * Returns params from environment variables
+ *
+ * @api private
+ */
+ getParamsFromEnv: function getParamsFromEnv() {
+ var ENV_TOKEN_FILE = 'AWS_WEB_IDENTITY_TOKEN_FILE',
+ ENV_ROLE_ARN = 'AWS_ROLE_ARN';
+ if (process.env[ENV_TOKEN_FILE] && process.env[ENV_ROLE_ARN]) {
+ return [{
+ envTokenFile: process.env[ENV_TOKEN_FILE],
+ roleArn: process.env[ENV_ROLE_ARN],
+ roleSessionName: process.env['AWS_ROLE_SESSION_NAME']
+ }];
+ }
+ },
+
+ /**
+ * Returns params from shared config variables
+ *
+ * @api private
+ */
+ getParamsFromSharedConfig: function getParamsFromSharedConfig() {
+ var profiles = AWS.util.getProfilesFromSharedConfig(iniLoader);
+ var profileName = process.env.AWS_PROFILE || AWS.util.defaultProfile;
+ var profile = profiles[profileName] || {};
+
+ if (Object.keys(profile).length === 0) {
+ throw AWS.util.error(
+ new Error('Profile ' + profileName + ' not found'),
+ { code: 'TokenFileWebIdentityCredentialsProviderFailure' }
+ );
+ }
+
+ var paramsArray = [];
+
+ while (!profile['web_identity_token_file'] && profile['source_profile']) {
+ paramsArray.unshift({
+ roleArn: profile['role_arn'],
+ roleSessionName: profile['role_session_name']
+ });
+ var sourceProfile = profile['source_profile'];
+ profile = profiles[sourceProfile];
+ }
+
+ paramsArray.unshift({
+ envTokenFile: profile['web_identity_token_file'],
+ roleArn: profile['role_arn'],
+ roleSessionName: profile['role_session_name']
+ });
+
+ return paramsArray;
+ },
+
+ /**
+ * Refreshes credentials using {AWS.STS.assumeRoleWithWebIdentity}
+ *
+ * @callback callback function(err)
+ * Called when the STS service responds (or fails). When
+ * this callback is called with no error, it means that the credentials
+ * information has been loaded into the object (as the `accessKeyId`,
+ * `secretAccessKey`, and `sessionToken` properties).
+ * @param err [Error] if an error occurred, this value will be filled + * @see AWS.Credentials.get + */ + refresh: function refresh(callback) { + this.coalesceRefresh(callback || AWS.util.fn.callback); + }, + + /** + * @api private + */ + assumeRoleChaining: function assumeRoleChaining(paramsArray, callback) { + var self = this; + if (paramsArray.length === 0) { + self.service.credentialsFrom(self.data, self); + callback(); + } else { + var params = paramsArray.shift(); + self.service.config.credentials = self.service.credentialsFrom(self.data, self); + self.service.assumeRole( + { + RoleArn: params.roleArn, + RoleSessionName: params.roleSessionName || 'token-file-web-identity' + }, + function (err, data) { + self.data = null; + if (err) { + callback(err); + } else { + self.data = data; + self.assumeRoleChaining(paramsArray, callback); + } + } + ); + } + }, + + /** + * @api private + */ + load: function load(callback) { + var self = this; + try { + var paramsArray = self.getParamsFromEnv(); + if (!paramsArray) { + paramsArray = self.getParamsFromSharedConfig(); + } + if (paramsArray) { + var params = paramsArray.shift(); + var oidcToken = fs.readFileSync(params.envTokenFile, {encoding: 'ascii'}); + if (!self.service) { + self.createClients(); + } + self.service.assumeRoleWithWebIdentity( + { + WebIdentityToken: oidcToken, + RoleArn: params.roleArn, + RoleSessionName: params.roleSessionName || 'token-file-web-identity' + }, + function (err, data) { + self.data = null; + if (err) { + callback(err); + } else { + self.data = data; + self.assumeRoleChaining(paramsArray, callback); + } + } + ); + } + } catch (err) { + callback(err); + } + }, + + /** + * @api private + */ + createClients: function() { + if (!this.service) { + var stsConfig = AWS.util.merge({}, this.clientConfig); + this.service = new STS(stsConfig); + + // Retry in case of IDPCommunicationErrorException or InvalidIdentityToken + this.service.retryableError = function(error) { + if (error.code === 'IDPCommunicationErrorException' || error.code === 'InvalidIdentityToken') { + return true; + } else { + return AWS.Service.prototype.retryableError.call(this, error); + } + }; + } + } +}); + + +/***/ }), + +/***/ 74998: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var STS = __nccwpck_require__(57513); + +/** + * Represents credentials retrieved from STS Web Identity Federation support. + * + * By default this provider gets credentials using the + * {AWS.STS.assumeRoleWithWebIdentity} service operation. This operation + * requires a `RoleArn` containing the ARN of the IAM trust policy for the + * application for which credentials will be given. In addition, the + * `WebIdentityToken` must be set to the token provided by the identity + * provider. See {constructor} for an example on creating a credentials + * object with proper `RoleArn` and `WebIdentityToken` values. + * + * ## Refreshing Credentials from Identity Service + * + * In addition to AWS credentials expiring after a given amount of time, the + * login token from the identity provider will also expire. Once this token + * expires, it will not be usable to refresh AWS credentials, and another + * token will be needed. The SDK does not manage refreshing of the token value, + * but this can be done through a "refresh token" supported by most identity + * providers. Consult the documentation for the identity provider for refreshing + * tokens. 
Once the refreshed token is acquired, you should make sure to update + * this new token in the credentials object's {params} property. The following + * code will update the WebIdentityToken, assuming you have retrieved an updated + * token from the identity provider: + * + * ```javascript + * AWS.config.credentials.params.WebIdentityToken = updatedToken; + * ``` + * + * Future calls to `credentials.refresh()` will now use the new token. + * + * @!attribute params + * @return [map] the map of params passed to + * {AWS.STS.assumeRoleWithWebIdentity}. To update the token, set the + * `params.WebIdentityToken` property. + * @!attribute data + * @return [map] the raw data response from the call to + * {AWS.STS.assumeRoleWithWebIdentity}. Use this if you want to get + * access to other properties from the response. + */ +AWS.WebIdentityCredentials = AWS.util.inherit(AWS.Credentials, { + /** + * Creates a new credentials object. + * @param (see AWS.STS.assumeRoleWithWebIdentity) + * @example Creating a new credentials object + * AWS.config.credentials = new AWS.WebIdentityCredentials({ + * RoleArn: 'arn:aws:iam::1234567890:role/WebIdentity', + * WebIdentityToken: 'ABCDEFGHIJKLMNOP', // token from identity service + * RoleSessionName: 'web' // optional name, defaults to web-identity + * }, { + * // optionally provide configuration to apply to the underlying AWS.STS service client + * // if configuration is not provided, then configuration will be pulled from AWS.config + * + * // specify timeout options + * httpOptions: { + * timeout: 100 + * } + * }); + * @see AWS.STS.assumeRoleWithWebIdentity + * @see AWS.Config + */ + constructor: function WebIdentityCredentials(params, clientConfig) { + AWS.Credentials.call(this); + this.expired = true; + this.params = params; + this.params.RoleSessionName = this.params.RoleSessionName || 'web-identity'; + this.data = null; + this._clientConfig = AWS.util.copy(clientConfig || {}); + }, + + /** + * Refreshes credentials using {AWS.STS.assumeRoleWithWebIdentity} + * + * @callback callback function(err) + * Called when the STS service responds (or fails). When + * this callback is called with no error, it means that the credentials + * information has been loaded into the object (as the `accessKeyId`, + * `secretAccessKey`, and `sessionToken` properties). 
+ * @param err [Error] if an error occurred, this value will be filled + * @see get + */ + refresh: function refresh(callback) { + this.coalesceRefresh(callback || AWS.util.fn.callback); + }, + + /** + * @api private + */ + load: function load(callback) { + var self = this; + self.createClients(); + self.service.assumeRoleWithWebIdentity(function (err, data) { + self.data = null; + if (!err) { + self.data = data; + self.service.credentialsFrom(data, self); + } + callback(err); + }); + }, + + /** + * @api private + */ + createClients: function() { + if (!this.service) { + var stsConfig = AWS.util.merge({}, this._clientConfig); + stsConfig.params = this.params; + this.service = new STS(stsConfig); + } + } + +}); + + +/***/ }), + +/***/ 45313: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var util = __nccwpck_require__(77985); +var endpointDiscoveryEnabledEnvs = ['AWS_ENABLE_ENDPOINT_DISCOVERY', 'AWS_ENDPOINT_DISCOVERY_ENABLED']; + +/** + * Generate key (except resources and operation part) to index the endpoints in the cache + * If input shape has endpointdiscoveryid trait then use + * accessKey + operation + resources + region + service as cache key + * If input shape doesn't have endpointdiscoveryid trait then use + * accessKey + region + service as cache key + * @return [map] object with keys to index endpoints. + * @api private + */ +function getCacheKey(request) { + var service = request.service; + var api = service.api || {}; + var operations = api.operations; + var identifiers = {}; + if (service.config.region) { + identifiers.region = service.config.region; + } + if (api.serviceId) { + identifiers.serviceId = api.serviceId; + } + if (service.config.credentials.accessKeyId) { + identifiers.accessKeyId = service.config.credentials.accessKeyId; + } + return identifiers; +} + +/** + * Recursive helper for marshallCustomIdentifiers(). + * Looks for required string input members that have 'endpointdiscoveryid' trait. + * @api private + */ +function marshallCustomIdentifiersHelper(result, params, shape) { + if (!shape || params === undefined || params === null) return; + if (shape.type === 'structure' && shape.required && shape.required.length > 0) { + util.arrayEach(shape.required, function(name) { + var memberShape = shape.members[name]; + if (memberShape.endpointDiscoveryId === true) { + var locationName = memberShape.isLocationName ? memberShape.name : name; + result[locationName] = String(params[name]); + } else { + marshallCustomIdentifiersHelper(result, params[name], memberShape); + } + }); + } +} + +/** + * Get custom identifiers for cache key. + * Identifies custom identifiers by checking each shape's `endpointDiscoveryId` trait. + * @param [object] request object + * @param [object] input shape of the given operation's api + * @api private + */ +function marshallCustomIdentifiers(request, shape) { + var identifiers = {}; + marshallCustomIdentifiersHelper(identifiers, request.params, shape); + return identifiers; +} + +/** + * Call endpoint discovery operation when it's optional. + * When endpoint is available in cache then use the cached endpoints. If endpoints + * are unavailable then use regional endpoints and call endpoint discovery operation + * asynchronously. This is turned off by default. + * @param [object] request object + * @api private + */ +function optionalDiscoverEndpoint(request) { + var service = request.service; + var api = service.api; + var operationModel = api.operations ? 
api.operations[request.operation] : undefined; + var inputShape = operationModel ? operationModel.input : undefined; + + var identifiers = marshallCustomIdentifiers(request, inputShape); + var cacheKey = getCacheKey(request); + if (Object.keys(identifiers).length > 0) { + cacheKey = util.update(cacheKey, identifiers); + if (operationModel) cacheKey.operation = operationModel.name; + } + var endpoints = AWS.endpointCache.get(cacheKey); + if (endpoints && endpoints.length === 1 && endpoints[0].Address === '') { + //endpoint operation is being made but response not yet received + //or endpoint operation just failed in 1 minute + return; + } else if (endpoints && endpoints.length > 0) { + //found endpoint record from cache + request.httpRequest.updateEndpoint(endpoints[0].Address); + } else { + //endpoint record not in cache or outdated. make discovery operation + var endpointRequest = service.makeRequest(api.endpointOperation, { + Operation: operationModel.name, + Identifiers: identifiers, + }); + addApiVersionHeader(endpointRequest); + endpointRequest.removeListener('validate', AWS.EventListeners.Core.VALIDATE_PARAMETERS); + endpointRequest.removeListener('retry', AWS.EventListeners.Core.RETRY_CHECK); + //put in a placeholder for endpoints already requested, prevent + //too much in-flight calls + AWS.endpointCache.put(cacheKey, [{ + Address: '', + CachePeriodInMinutes: 1 + }]); + endpointRequest.send(function(err, data) { + if (data && data.Endpoints) { + AWS.endpointCache.put(cacheKey, data.Endpoints); + } else if (err) { + AWS.endpointCache.put(cacheKey, [{ + Address: '', + CachePeriodInMinutes: 1 //not to make more endpoint operation in next 1 minute + }]); + } + }); + } +} + +var requestQueue = {}; + +/** + * Call endpoint discovery operation when it's required. + * When endpoint is available in cache then use cached ones. If endpoints are + * unavailable then SDK should call endpoint operation then use returned new + * endpoint for the api call. SDK will automatically attempt to do endpoint + * discovery. This is turned off by default + * @param [object] request object + * @api private + */ +function requiredDiscoverEndpoint(request, done) { + var service = request.service; + var api = service.api; + var operationModel = api.operations ? api.operations[request.operation] : undefined; + var inputShape = operationModel ? 
operationModel.input : undefined; + + var identifiers = marshallCustomIdentifiers(request, inputShape); + var cacheKey = getCacheKey(request); + if (Object.keys(identifiers).length > 0) { + cacheKey = util.update(cacheKey, identifiers); + if (operationModel) cacheKey.operation = operationModel.name; + } + var cacheKeyStr = AWS.EndpointCache.getKeyString(cacheKey); + var endpoints = AWS.endpointCache.get(cacheKeyStr); //endpoint cache also accepts string keys + if (endpoints && endpoints.length === 1 && endpoints[0].Address === '') { + //endpoint operation is being made but response not yet received + //push request object to a pending queue + if (!requestQueue[cacheKeyStr]) requestQueue[cacheKeyStr] = []; + requestQueue[cacheKeyStr].push({request: request, callback: done}); + return; + } else if (endpoints && endpoints.length > 0) { + request.httpRequest.updateEndpoint(endpoints[0].Address); + done(); + } else { + var endpointRequest = service.makeRequest(api.endpointOperation, { + Operation: operationModel.name, + Identifiers: identifiers, + }); + endpointRequest.removeListener('validate', AWS.EventListeners.Core.VALIDATE_PARAMETERS); + addApiVersionHeader(endpointRequest); + + //put in a placeholder for endpoints already requested, prevent + //too much in-flight calls + AWS.endpointCache.put(cacheKeyStr, [{ + Address: '', + CachePeriodInMinutes: 60 //long-live cache + }]); + endpointRequest.send(function(err, data) { + if (err) { + request.response.error = util.error(err, { retryable: false }); + AWS.endpointCache.remove(cacheKey); + + //fail all the pending requests in batch + if (requestQueue[cacheKeyStr]) { + var pendingRequests = requestQueue[cacheKeyStr]; + util.arrayEach(pendingRequests, function(requestContext) { + requestContext.request.response.error = util.error(err, { retryable: false }); + requestContext.callback(); + }); + delete requestQueue[cacheKeyStr]; + } + } else if (data) { + AWS.endpointCache.put(cacheKeyStr, data.Endpoints); + request.httpRequest.updateEndpoint(data.Endpoints[0].Address); + + //update the endpoint for all the pending requests in batch + if (requestQueue[cacheKeyStr]) { + var pendingRequests = requestQueue[cacheKeyStr]; + util.arrayEach(pendingRequests, function(requestContext) { + requestContext.request.httpRequest.updateEndpoint(data.Endpoints[0].Address); + requestContext.callback(); + }); + delete requestQueue[cacheKeyStr]; + } + } + done(); + }); + } +} + +/** + * add api version header to endpoint operation + * @api private + */ +function addApiVersionHeader(endpointRequest) { + var api = endpointRequest.service.api; + var apiVersion = api.apiVersion; + if (apiVersion && !endpointRequest.httpRequest.headers['x-amz-api-version']) { + endpointRequest.httpRequest.headers['x-amz-api-version'] = apiVersion; + } +} + +/** + * If api call gets invalid endpoint exception, SDK should attempt to remove the invalid + * endpoint from cache. + * @api private + */ +function invalidateCachedEndpoints(response) { + var error = response.error; + var httpResponse = response.httpResponse; + if (error && + (error.code === 'InvalidEndpointException' || httpResponse.statusCode === 421) + ) { + var request = response.request; + var operations = request.service.api.operations || {}; + var inputShape = operations[request.operation] ? 
operations[request.operation].input : undefined;
+    var identifiers = marshallCustomIdentifiers(request, inputShape);
+    var cacheKey = getCacheKey(request);
+    if (Object.keys(identifiers).length > 0) {
+      cacheKey = util.update(cacheKey, identifiers);
+      if (operations[request.operation]) cacheKey.operation = operations[request.operation].name;
+    }
+    AWS.endpointCache.remove(cacheKey);
+  }
+}
+
+/**
+ * If an endpoint is explicitly configured, the SDK should not perform
+ * endpoint discovery at any time.
+ * @param [object] client Service client object.
+ * @api private
+ */
+function hasCustomEndpoint(client) {
+  //if a custom endpoint is set for a specific client, enabling endpoint discovery will raise an error
+  if (client._originalConfig && client._originalConfig.endpoint && client._originalConfig.endpointDiscoveryEnabled === true) {
+    throw util.error(new Error(), {
+      code: 'ConfigurationException',
+      message: 'Custom endpoint is supplied; endpointDiscoveryEnabled must not be true.'
+    });
+  }
+  var svcConfig = AWS.config[client.serviceIdentifier] || {};
+  return Boolean(AWS.config.endpoint || svcConfig.endpoint || (client._originalConfig && client._originalConfig.endpoint));
+}
+
+/**
+ * @api private
+ */
+function isFalsy(value) {
+  return ['false', '0'].indexOf(value) >= 0;
+}
+
+/**
+ * Whether endpoint discovery should be performed for this request when no
+ * operation of the given service requires endpoint discovery.
+ * The SDK resolves the configuration in the following order:
+ * 1. If set in client configuration.
+ * 2. If set in env AWS_ENABLE_ENDPOINT_DISCOVERY.
+ * 3. If set in shared ini config file with key 'endpoint_discovery_enabled'.
+ * @param [object] request request object.
+ * @returns [boolean|undefined] if endpoint discovery config is not set in any
+ *   source, this function returns undefined
+ * @api private
+ */
+function resolveEndpointDiscoveryConfig(request) {
+  var service = request.service || {};
+  if (service.config.endpointDiscoveryEnabled !== undefined) {
+    return service.config.endpointDiscoveryEnabled;
+  }
+
+  //shared ini file is only available in Node;
+  //don't check the env in the browser
+  if (util.isBrowser()) return undefined;
+
+  // If any recognized endpoint discovery config env is set
+  for (var i = 0; i < endpointDiscoveryEnabledEnvs.length; i++) {
+    var env = endpointDiscoveryEnabledEnvs[i];
+    if (Object.prototype.hasOwnProperty.call(process.env, env)) {
+      if (process.env[env] === '' || process.env[env] === undefined) {
+        throw util.error(new Error(), {
+          code: 'ConfigurationException',
+          message: 'environmental variable ' + env + ' cannot be set to nothing'
+        });
+      }
+      return !isFalsy(process.env[env]);
+    }
+  }
+
+  var configFile = {};
+  try {
+    configFile = AWS.util.iniLoader ?
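+  /* The three sources from the doc comment above, illustrated with
+   * hypothetical values:
+   *
+   *   1. new AWS.SomeService({ endpointDiscoveryEnabled: true });
+   *   2. AWS_ENABLE_ENDPOINT_DISCOVERY=true node app.js
+   *   3. ~/.aws/config:
+   *        [default]
+   *        endpoint_discovery_enabled = true
+   */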
AWS.util.iniLoader.loadFrom({
+      isConfig: true,
+      filename: process.env[AWS.util.sharedConfigFileEnv]
+    }) : {};
+  } catch (e) {}
+  var sharedFileConfig = configFile[
+    process.env.AWS_PROFILE || AWS.util.defaultProfile
+  ] || {};
+  if (Object.prototype.hasOwnProperty.call(sharedFileConfig, 'endpoint_discovery_enabled')) {
+    if (sharedFileConfig.endpoint_discovery_enabled === undefined) {
+      throw util.error(new Error(), {
+        code: 'ConfigurationException',
+        message: 'config file entry \'endpoint_discovery_enabled\' cannot be set to nothing'
+      });
+    }
+    return !isFalsy(sharedFileConfig.endpoint_discovery_enabled);
+  }
+  return undefined;
+}
+
+/**
+ * attach endpoint discovery logic to the request object
+ * @param [object] request
+ * @api private
+ */
+function discoverEndpoint(request, done) {
+  var service = request.service || {};
+  if (hasCustomEndpoint(service) || request.isPresigned()) return done();
+
+  var operations = service.api.operations || {};
+  var operationModel = operations[request.operation];
+  var isEndpointDiscoveryRequired = operationModel ? operationModel.endpointDiscoveryRequired : 'NULL';
+  var isEnabled = resolveEndpointDiscoveryConfig(request);
+  var hasRequiredEndpointDiscovery = service.api.hasRequiredEndpointDiscovery;
+  if (isEnabled || hasRequiredEndpointDiscovery) {
+    // Once a customer enables endpoint discovery, the SDK should start appending
+    // the string endpoint-discovery to the user-agent on all requests.
+    request.httpRequest.appendToUserAgent('endpoint-discovery');
+  }
+  switch (isEndpointDiscoveryRequired) {
+    case 'OPTIONAL':
+      if (isEnabled || hasRequiredEndpointDiscovery) {
+        // For a given service, if at least one operation requires endpoint discovery then the SDK must enable endpoint discovery
+        // by default for all operations of that service, including operations where endpoint discovery is optional.
+        optionalDiscoverEndpoint(request);
+        request.addNamedListener('INVALIDATE_CACHED_ENDPOINTS', 'extractError', invalidateCachedEndpoints);
+      }
+      done();
+      break;
+    case 'REQUIRED':
+      if (isEnabled === false) {
+        // For a given operation, if endpoint discovery is required and it has been disabled on the SDK client,
+        // then the SDK must return a clear and actionable exception.
+        request.response.error = util.error(new Error(), {
+          code: 'ConfigurationException',
+          message: 'Endpoint Discovery is disabled but ' + service.api.className + '.' + request.operation +
+            '() requires it. Please check your configurations.'
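+          /* Illustrative trigger (hypothetical operation name): a client
+           * created with { endpointDiscoveryEnabled: false } calling an
+           * operation modeled with endpointDiscoveryRequired = 'REQUIRED'
+           * ends up here. */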
+ }); + done(); + break; + } + request.addNamedListener('INVALIDATE_CACHED_ENDPOINTS', 'extractError', invalidateCachedEndpoints); + requiredDiscoverEndpoint(request, done); + break; + case 'NULL': + default: + done(); + break; + } +} + +module.exports = { + discoverEndpoint: discoverEndpoint, + requiredDiscoverEndpoint: requiredDiscoverEndpoint, + optionalDiscoverEndpoint: optionalDiscoverEndpoint, + marshallCustomIdentifiers: marshallCustomIdentifiers, + getCacheKey: getCacheKey, + invalidateCachedEndpoint: invalidateCachedEndpoints, +}; + + +/***/ }), + +/***/ 76663: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var util = AWS.util; +var typeOf = (__nccwpck_require__(48084).typeOf); +var DynamoDBSet = __nccwpck_require__(20304); +var NumberValue = __nccwpck_require__(91593); + +AWS.DynamoDB.Converter = { + /** + * Convert a JavaScript value to its equivalent DynamoDB AttributeValue type + * + * @param data [any] The data to convert to a DynamoDB AttributeValue + * @param options [map] + * @option options convertEmptyValues [Boolean] Whether to automatically + * convert empty strings, blobs, + * and sets to `null` + * @option options wrapNumbers [Boolean] Whether to return numbers as a + * NumberValue object instead of + * converting them to native JavaScript + * numbers. This allows for the safe + * round-trip transport of numbers of + * arbitrary size. + * @return [map] An object in the Amazon DynamoDB AttributeValue format + * + * @see AWS.DynamoDB.Converter.marshall AWS.DynamoDB.Converter.marshall to + * convert entire records (rather than individual attributes) + */ + input: function convertInput(data, options) { + options = options || {}; + var type = typeOf(data); + if (type === 'Object') { + return formatMap(data, options); + } else if (type === 'Array') { + return formatList(data, options); + } else if (type === 'Set') { + return formatSet(data, options); + } else if (type === 'String') { + if (data.length === 0 && options.convertEmptyValues) { + return convertInput(null); + } + return { S: data }; + } else if (type === 'Number' || type === 'NumberValue') { + return { N: data.toString() }; + } else if (type === 'Binary') { + if (data.length === 0 && options.convertEmptyValues) { + return convertInput(null); + } + return { B: data }; + } else if (type === 'Boolean') { + return { BOOL: data }; + } else if (type === 'null') { + return { NULL: true }; + } else if (type !== 'undefined' && type !== 'Function') { + // this value has a custom constructor + return formatMap(data, options); + } + }, + + /** + * Convert a JavaScript object into a DynamoDB record. + * + * @param data [any] The data to convert to a DynamoDB record + * @param options [map] + * @option options convertEmptyValues [Boolean] Whether to automatically + * convert empty strings, blobs, + * and sets to `null` + * @option options wrapNumbers [Boolean] Whether to return numbers as a + * NumberValue object instead of + * converting them to native JavaScript + * numbers. This allows for the safe + * round-trip transport of numbers of + * arbitrary size. + * + * @return [map] An object in the DynamoDB record format. 
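+   *
+   * For instance (illustrative, following the `input` conversion above),
+   * `marshall({ str: 'x', num: 1 })` returns
+   * `{ str: { S: 'x' }, num: { N: '1' } }`.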
+ * + * @example Convert a JavaScript object into a DynamoDB record + * var marshalled = AWS.DynamoDB.Converter.marshall({ + * string: 'foo', + * list: ['fizz', 'buzz', 'pop'], + * map: { + * nestedMap: { + * key: 'value', + * } + * }, + * number: 123, + * nullValue: null, + * boolValue: true, + * stringSet: new DynamoDBSet(['foo', 'bar', 'baz']) + * }); + */ + marshall: function marshallItem(data, options) { + return AWS.DynamoDB.Converter.input(data, options).M; + }, + + /** + * Convert a DynamoDB AttributeValue object to its equivalent JavaScript type. + * + * @param data [map] An object in the Amazon DynamoDB AttributeValue format + * @param options [map] + * @option options convertEmptyValues [Boolean] Whether to automatically + * convert empty strings, blobs, + * and sets to `null` + * @option options wrapNumbers [Boolean] Whether to return numbers as a + * NumberValue object instead of + * converting them to native JavaScript + * numbers. This allows for the safe + * round-trip transport of numbers of + * arbitrary size. + * + * @return [Object|Array|String|Number|Boolean|null] + * + * @see AWS.DynamoDB.Converter.unmarshall AWS.DynamoDB.Converter.unmarshall to + * convert entire records (rather than individual attributes) + */ + output: function convertOutput(data, options) { + options = options || {}; + var list, map, i; + for (var type in data) { + var values = data[type]; + if (type === 'M') { + map = {}; + for (var key in values) { + map[key] = convertOutput(values[key], options); + } + return map; + } else if (type === 'L') { + list = []; + for (i = 0; i < values.length; i++) { + list.push(convertOutput(values[i], options)); + } + return list; + } else if (type === 'SS') { + list = []; + for (i = 0; i < values.length; i++) { + list.push(values[i] + ''); + } + return new DynamoDBSet(list); + } else if (type === 'NS') { + list = []; + for (i = 0; i < values.length; i++) { + list.push(convertNumber(values[i], options.wrapNumbers)); + } + return new DynamoDBSet(list); + } else if (type === 'BS') { + list = []; + for (i = 0; i < values.length; i++) { + list.push(AWS.util.buffer.toBuffer(values[i])); + } + return new DynamoDBSet(list); + } else if (type === 'S') { + return values + ''; + } else if (type === 'N') { + return convertNumber(values, options.wrapNumbers); + } else if (type === 'B') { + return util.buffer.toBuffer(values); + } else if (type === 'BOOL') { + return (values === 'true' || values === 'TRUE' || values === true); + } else if (type === 'NULL') { + return null; + } + } + }, + + /** + * Convert a DynamoDB record into a JavaScript object. + * + * @param data [any] The DynamoDB record + * @param options [map] + * @option options convertEmptyValues [Boolean] Whether to automatically + * convert empty strings, blobs, + * and sets to `null` + * @option options wrapNumbers [Boolean] Whether to return numbers as a + * NumberValue object instead of + * converting them to native JavaScript + * numbers. This allows for the safe + * round-trip transport of numbers of + * arbitrary size. + * + * @return [map] An object whose properties have been converted from + * DynamoDB's AttributeValue format into their corresponding native + * JavaScript types. 
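+   *
+   * For instance (illustrative, reversing the `marshall` conversion above),
+   * `unmarshall({ str: { S: 'x' }, num: { N: '1' } })` returns
+   * `{ str: 'x', num: 1 }`.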
+   *
+   * @example Convert a record received from a DynamoDB stream
+   *  var unmarshalled = AWS.DynamoDB.Converter.unmarshall({
+   *    string: {S: 'foo'},
+   *    list: {L: [{S: 'fizz'}, {S: 'buzz'}, {S: 'pop'}]},
+   *    map: {
+   *      M: {
+   *        nestedMap: {
+   *          M: {
+   *            key: {S: 'value'}
+   *          }
+   *        }
+   *      }
+   *    },
+   *    number: {N: '123'},
+   *    nullValue: {NULL: true},
+   *    boolValue: {BOOL: true}
+   *  });
+   */
+  unmarshall: function unmarshall(data, options) {
+    return AWS.DynamoDB.Converter.output({M: data}, options);
+  }
+};
+
+/**
+ * @api private
+ * @param data [Array]
+ * @param options [map]
+ */
+function formatList(data, options) {
+  var list = {L: []};
+  for (var i = 0; i < data.length; i++) {
+    list['L'].push(AWS.DynamoDB.Converter.input(data[i], options));
+  }
+  return list;
+}
+
+/**
+ * @api private
+ * @param value [String]
+ * @param wrapNumbers [Boolean]
+ */
+function convertNumber(value, wrapNumbers) {
+  return wrapNumbers ? new NumberValue(value) : Number(value);
+}
+
+/**
+ * @api private
+ * @param data [map]
+ * @param options [map]
+ */
+function formatMap(data, options) {
+  var map = {M: {}};
+  for (var key in data) {
+    var formatted = AWS.DynamoDB.Converter.input(data[key], options);
+    if (formatted !== void 0) {
+      map['M'][key] = formatted;
+    }
+  }
+  return map;
+}
+
+/**
+ * @api private
+ */
+function formatSet(data, options) {
+  options = options || {};
+  var values = data.values;
+  if (options.convertEmptyValues) {
+    values = filterEmptySetValues(data);
+    if (values.length === 0) {
+      return AWS.DynamoDB.Converter.input(null);
+    }
+  }
+
+  var map = {};
+  switch (data.type) {
+    case 'String': map['SS'] = values; break;
+    case 'Binary': map['BS'] = values; break;
+    case 'Number': map['NS'] = values.map(function (value) {
+      return value.toString();
+    });
+  }
+  return map;
+}
+
+/**
+ * @api private
+ */
+function filterEmptySetValues(set) {
+  var nonEmptyValues = [];
+  var potentiallyEmptyTypes = {
+    String: true,
+    Binary: true,
+    Number: false
+  };
+  if (potentiallyEmptyTypes[set.type]) {
+    for (var i = 0; i < set.values.length; i++) {
+      if (set.values[i].length === 0) {
+        continue;
+      }
+      nonEmptyValues.push(set.values[i]);
+    }
+
+    return nonEmptyValues;
+  }
+
+  return set.values;
+}
+
+/**
+ * @api private
+ */
+module.exports = AWS.DynamoDB.Converter;
+
+
+/***/ }),
+
+/***/ 90030:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var Translator = __nccwpck_require__(34222);
+var DynamoDBSet = __nccwpck_require__(20304);
+
+/**
+ * The document client simplifies working with items in Amazon DynamoDB
+ * by abstracting away the notion of attribute values. This abstraction
+ * annotates native JavaScript types supplied as input parameters, as well
+ * as converts annotated response data to native JavaScript types.
+ *
+ * ## Marshalling Input and Unmarshalling Response Data
+ *
+ * The document client affords developers the use of native JavaScript types
+ * instead of `AttributeValue`s to simplify the JavaScript development
+ * experience with Amazon DynamoDB. JavaScript objects passed in as parameters
+ * are marshalled into `AttributeValue` shapes required by Amazon DynamoDB.
+ * Responses from DynamoDB are unmarshalled into plain JavaScript objects
+ * by the `DocumentClient`. The `DocumentClient` does not accept
+ * `AttributeValue`s in favor of native JavaScript types.
+ * + * | JavaScript Type | DynamoDB AttributeValue | + * |:----------------------------------------------------------------------:|-------------------------| + * | String | S | + * | Number | N | + * | Boolean | BOOL | + * | null | NULL | + * | Array | L | + * | Object | M | + * | Buffer, File, Blob, ArrayBuffer, DataView, and JavaScript typed arrays | B | + * + * ## Support for Sets + * + * The `DocumentClient` offers a convenient way to create sets from + * JavaScript Arrays. The type of set is inferred from the first element + * in the array. DynamoDB supports string, number, and binary sets. To + * learn more about supported types see the + * [Amazon DynamoDB Data Model Documentation](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html) + * For more information see {AWS.DynamoDB.DocumentClient.createSet} + * + */ +AWS.DynamoDB.DocumentClient = AWS.util.inherit({ + + /** + * Creates a DynamoDB document client with a set of configuration options. + * + * @option options params [map] An optional map of parameters to bind to every + * request sent by this service object. + * @option options service [AWS.DynamoDB] An optional pre-configured instance + * of the AWS.DynamoDB service object. This instance's config will be + * copied to a new instance used by this client. You should not need to + * retain a reference to the input object, and may destroy it or allow it + * to be garbage collected. + * @option options convertEmptyValues [Boolean] set to true if you would like + * the document client to convert empty values (0-length strings, binary + * buffers, and sets) to be converted to NULL types when persisting to + * DynamoDB. + * @option options wrapNumbers [Boolean] Set to true to return numbers as a + * NumberValue object instead of converting them to native JavaScript numbers. + * This allows for the safe round-trip transport of numbers of arbitrary size. 
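+   * @example Creating a document client with conversion options (illustrative)
+   *  var documentClient = new AWS.DynamoDB.DocumentClient({
+   *    convertEmptyValues: true,
+   *    wrapNumbers: true
+   *  });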
+   * @see AWS.DynamoDB.constructor
+   *
+   */
+  constructor: function DocumentClient(options) {
+    var self = this;
+    self.options = options || {};
+    self.configure(self.options);
+  },
+
+  /**
+   * @api private
+   */
+  configure: function configure(options) {
+    var self = this;
+    self.service = options.service;
+    self.bindServiceObject(options);
+    self.attrValue = options.attrValue =
+      self.service.api.operations.putItem.input.members.Item.value.shape;
+  },
+
+  /**
+   * @api private
+   */
+  bindServiceObject: function bindServiceObject(options) {
+    var self = this;
+    options = options || {};
+
+    if (!self.service) {
+      self.service = new AWS.DynamoDB(options);
+    } else {
+      var config = AWS.util.copy(self.service.config);
+      self.service = new self.service.constructor.__super__(config);
+      self.service.config.params =
+        AWS.util.merge(self.service.config.params || {}, options.params);
+    }
+  },
+
+  /**
+   * @api private
+   */
+  makeServiceRequest: function(operation, params, callback) {
+    var self = this;
+    var request = self.service[operation](params);
+    self.setupRequest(request);
+    self.setupResponse(request);
+    if (typeof callback === 'function') {
+      request.send(callback);
+    }
+    return request;
+  },
+
+  /**
+   * @api private
+   */
+  serviceClientOperationsMap: {
+    batchGet: 'batchGetItem',
+    batchWrite: 'batchWriteItem',
+    delete: 'deleteItem',
+    get: 'getItem',
+    put: 'putItem',
+    query: 'query',
+    scan: 'scan',
+    update: 'updateItem',
+    transactGet: 'transactGetItems',
+    transactWrite: 'transactWriteItems'
+  },
+
+  /**
+   * Returns the attributes of one or more items from one or more tables
+   * by delegating to `AWS.DynamoDB.batchGetItem()`.
+   *
+   * Supply the same parameters as {AWS.DynamoDB.batchGetItem} with
+   * `AttributeValue`s substituted by native JavaScript types.
+   *
+   * @see AWS.DynamoDB.batchGetItem
+   * @example Get items from multiple tables
+   *  var params = {
+   *    RequestItems: {
+   *      'Table-1': {
+   *        Keys: [
+   *          {
+   *            HashKey: 'hashkey',
+   *            NumberRangeKey: 1
+   *          }
+   *        ]
+   *      },
+   *      'Table-2': {
+   *        Keys: [
+   *          { foo: 'bar' },
+   *        ]
+   *      }
+   *    }
+   *  };
+   *
+   *  var documentClient = new AWS.DynamoDB.DocumentClient();
+   *
+   *  documentClient.batchGet(params, function(err, data) {
+   *    if (err) console.log(err);
+   *    else console.log(data);
+   *  });
+   *
+   */
+  batchGet: function(params, callback) {
+    var operation = this.serviceClientOperationsMap['batchGet'];
+    return this.makeServiceRequest(operation, params, callback);
+  },
+
+  /**
+   * Puts or deletes multiple items in one or more tables by delegating
+   * to `AWS.DynamoDB.batchWriteItem()`.
+   *
+   * Supply the same parameters as {AWS.DynamoDB.batchWriteItem} with
+   * `AttributeValue`s substituted by native JavaScript types.
+   *
+   * @see AWS.DynamoDB.batchWriteItem
+   * @example Write to and delete from a table
+   *  var params = {
+   *    RequestItems: {
+   *      'Table-1': [
+   *        {
+   *          DeleteRequest: {
+   *            Key: { HashKey: 'someKey' }
+   *          }
+   *        },
+   *        {
+   *          PutRequest: {
+   *            Item: {
+   *              HashKey: 'anotherKey',
+   *              NumAttribute: 1,
+   *              BoolAttribute: true,
+   *              ListAttribute: [1, 'two', false],
+   *              MapAttribute: { foo: 'bar' }
+   *            }
+   *          }
+   *        }
+   *      ]
+   *    }
+   *  };
+   *
+   *  var documentClient = new AWS.DynamoDB.DocumentClient();
+   *
+   *  documentClient.batchWrite(params, function(err, data) {
+   *    if (err) console.log(err);
+   *    else console.log(data);
+   *  });
+   *
+   */
+  batchWrite: function(params, callback) {
+    var operation = this.serviceClientOperationsMap['batchWrite'];
+    return this.makeServiceRequest(operation, params, callback);
+  },
+
+  /**
+   * Deletes a single item in a table by primary key by delegating to
+   * `AWS.DynamoDB.deleteItem()`.
+   *
+   * Supply the same parameters as {AWS.DynamoDB.deleteItem} with
+   * `AttributeValue`s substituted by native JavaScript types.
+   *
+   * @see AWS.DynamoDB.deleteItem
+   * @example Delete an item from a table
+   *  var params = {
+   *    TableName : 'Table',
+   *    Key: {
+   *      HashKey: 'hashkey',
+   *      NumberRangeKey: 1
+   *    }
+   *  };
+   *
+   *  var documentClient = new AWS.DynamoDB.DocumentClient();
+   *
+   *  documentClient.delete(params, function(err, data) {
+   *    if (err) console.log(err);
+   *    else console.log(data);
+   *  });
+   *
+   */
+  delete: function(params, callback) {
+    var operation = this.serviceClientOperationsMap['delete'];
+    return this.makeServiceRequest(operation, params, callback);
+  },
+
+  /**
+   * Returns a set of attributes for the item with the given primary key
+   * by delegating to `AWS.DynamoDB.getItem()`.
+   *
+   * Supply the same parameters as {AWS.DynamoDB.getItem} with
+   * `AttributeValue`s substituted by native JavaScript types.
+   *
+   * @see AWS.DynamoDB.getItem
+   * @example Get an item from a table
+   *  var params = {
+   *    TableName : 'Table',
+   *    Key: {
+   *      HashKey: 'hashkey'
+   *    }
+   *  };
+   *
+   *  var documentClient = new AWS.DynamoDB.DocumentClient();
+   *
+   *  documentClient.get(params, function(err, data) {
+   *    if (err) console.log(err);
+   *    else console.log(data);
+   *  });
+   *
+   */
+  get: function(params, callback) {
+    var operation = this.serviceClientOperationsMap['get'];
+    return this.makeServiceRequest(operation, params, callback);
+  },
+
+  /**
+   * Creates a new item, or replaces an old item with a new item by
+   * delegating to `AWS.DynamoDB.putItem()`.
+   *
+   * Supply the same parameters as {AWS.DynamoDB.putItem} with
+   * `AttributeValue`s substituted by native JavaScript types.
+   *
+   * @see AWS.DynamoDB.putItem
+   * @example Create a new item in a table
+   *  var params = {
+   *    TableName : 'Table',
+   *    Item: {
+   *      HashKey: 'hashkey',
+   *      NumAttribute: 1,
+   *      BoolAttribute: true,
+   *      ListAttribute: [1, 'two', false],
+   *      MapAttribute: { foo: 'bar'},
+   *      NullAttribute: null
+   *    }
+   *  };
+   *
+   *  var documentClient = new AWS.DynamoDB.DocumentClient();
+   *
+   *  documentClient.put(params, function(err, data) {
+   *    if (err) console.log(err);
+   *    else console.log(data);
+   *  });
+   *
+   */
+  put: function(params, callback) {
+    var operation = this.serviceClientOperationsMap['put'];
+    return this.makeServiceRequest(operation, params, callback);
+  },
+
+  /**
+   * Edits an existing item's attributes, or adds a new item to the table if
+   * it does not already exist by delegating to `AWS.DynamoDB.updateItem()`.
+ * + * Supply the same parameters as {AWS.DynamoDB.updateItem} with + * `AttributeValue`s substituted by native JavaScript types. + * + * @see AWS.DynamoDB.updateItem + * @example Update an item with expressions + * var params = { + * TableName: 'Table', + * Key: { HashKey : 'hashkey' }, + * UpdateExpression: 'set #a = :x + :y', + * ConditionExpression: '#a < :MAX', + * ExpressionAttributeNames: {'#a' : 'Sum'}, + * ExpressionAttributeValues: { + * ':x' : 20, + * ':y' : 45, + * ':MAX' : 100, + * } + * }; + * + * var documentClient = new AWS.DynamoDB.DocumentClient(); + * + * documentClient.update(params, function(err, data) { + * if (err) console.log(err); + * else console.log(data); + * }); + * + */ + update: function(params, callback) { + var operation = this.serviceClientOperationsMap['update']; + return this.makeServiceRequest(operation, params, callback); + }, + + /** + * Returns one or more items and item attributes by accessing every item + * in a table or a secondary index. + * + * Supply the same parameters as {AWS.DynamoDB.scan} with + * `AttributeValue`s substituted by native JavaScript types. + * + * @see AWS.DynamoDB.scan + * @example Scan the table with a filter expression + * var params = { + * TableName : 'Table', + * FilterExpression : 'Year = :this_year', + * ExpressionAttributeValues : {':this_year' : 2015} + * }; + * + * var documentClient = new AWS.DynamoDB.DocumentClient(); + * + * documentClient.scan(params, function(err, data) { + * if (err) console.log(err); + * else console.log(data); + * }); + * + */ + scan: function(params, callback) { + var operation = this.serviceClientOperationsMap['scan']; + return this.makeServiceRequest(operation, params, callback); + }, + + /** + * Directly access items from a table by primary key or a secondary index. + * + * Supply the same parameters as {AWS.DynamoDB.query} with + * `AttributeValue`s substituted by native JavaScript types. + * + * @see AWS.DynamoDB.query + * @example Query an index + * var params = { + * TableName: 'Table', + * IndexName: 'Index', + * KeyConditionExpression: 'HashKey = :hkey and RangeKey > :rkey', + * ExpressionAttributeValues: { + * ':hkey': 'key', + * ':rkey': 2015 + * } + * }; + * + * var documentClient = new AWS.DynamoDB.DocumentClient(); + * + * documentClient.query(params, function(err, data) { + * if (err) console.log(err); + * else console.log(data); + * }); + * + */ + query: function(params, callback) { + var operation = this.serviceClientOperationsMap['query']; + return this.makeServiceRequest(operation, params, callback); + }, + + /** + * Synchronous write operation that groups up to 100 action requests. + * + * Supply the same parameters as {AWS.DynamoDB.transactWriteItems} with + * `AttributeValue`s substituted by native JavaScript types. 
+   *
+   * @see AWS.DynamoDB.transactWriteItems
+   * @example Write items to multiple tables
+   *  var params = {
+   *    TransactItems: [{
+   *      Put: {
+   *        TableName : 'Table0',
+   *        Item: {
+   *          HashKey: 'hashkey',
+   *          NumAttribute: 1,
+   *          BoolAttribute: true,
+   *          ListAttribute: [1, 'two', false],
+   *          MapAttribute: { foo: 'bar'},
+   *          NullAttribute: null
+   *        }
+   *      }
+   *    }, {
+   *      Update: {
+   *        TableName: 'Table1',
+   *        Key: { HashKey : 'hashkey' },
+   *        UpdateExpression: 'set #a = :x + :y',
+   *        ConditionExpression: '#a < :MAX',
+   *        ExpressionAttributeNames: {'#a' : 'Sum'},
+   *        ExpressionAttributeValues: {
+   *          ':x' : 20,
+   *          ':y' : 45,
+   *          ':MAX' : 100,
+   *        }
+   *      }
+   *    }]
+   *  };
+   *
+   *  documentClient.transactWrite(params, function(err, data) {
+   *    if (err) console.log(err);
+   *    else console.log(data);
+   *  });
+   */
+  transactWrite: function(params, callback) {
+    var operation = this.serviceClientOperationsMap['transactWrite'];
+    return this.makeServiceRequest(operation, params, callback);
+  },
+
+  /**
+   * Atomically retrieves multiple items from one or more tables (but not from indexes)
+   * in a single account and region.
+   *
+   * Supply the same parameters as {AWS.DynamoDB.transactGetItems} with
+   * `AttributeValue`s substituted by native JavaScript types.
+   *
+   * @see AWS.DynamoDB.transactGetItems
+   * @example Get items from multiple tables
+   *  var params = {
+   *    TransactItems: [{
+   *      Get: {
+   *        TableName : 'Table0',
+   *        Key: {
+   *          HashKey: 'hashkey0'
+   *        }
+   *      }
+   *    }, {
+   *      Get: {
+   *        TableName : 'Table1',
+   *        Key: {
+   *          HashKey: 'hashkey1'
+   *        }
+   *      }
+   *    }]
+   *  };
+   *
+   *  documentClient.transactGet(params, function(err, data) {
+   *    if (err) console.log(err);
+   *    else console.log(data);
+   *  });
+   */
+  transactGet: function(params, callback) {
+    var operation = this.serviceClientOperationsMap['transactGet'];
+    return this.makeServiceRequest(operation, params, callback);
+  },
+
+  /**
+   * Creates a set of elements inferring the type of set from
+   * the type of the first element. Amazon DynamoDB currently supports
+   * number sets, string sets, and binary sets. For more information
+   * about DynamoDB data types see the documentation on the
+   * [Amazon DynamoDB Data Model](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModel.DataTypes).
+   *
+   * @param list [Array] Collection to represent your DynamoDB Set
+   * @param options [map]
+   *  * **validate** [Boolean] set to true if you want to validate the type
+   *    of each element in the set. Defaults to `false`.
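+   * @example Validating member types (illustrative; throws InvalidType when
+   *          the members are not all of the set's inferred type)
+   *  var set = documentClient.createSet(['a', 'b', 'c'], { validate: true });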
+   * @example Creating a number set
+   *  var documentClient = new AWS.DynamoDB.DocumentClient();
+   *
+   *  var params = {
+   *    Item: {
+   *      hashkey: 'hashkey',
+   *      numbers: documentClient.createSet([1, 2, 3])
+   *    }
+   *  };
+   *
+   *  documentClient.put(params, function(err, data) {
+   *    if (err) console.log(err);
+   *    else console.log(data);
+   *  });
+   *
+   */
+  createSet: function(list, options) {
+    options = options || {};
+    return new DynamoDBSet(list, options);
+  },
+
+  /**
+   * @api private
+   */
+  getTranslator: function() {
+    return new Translator(this.options);
+  },
+
+  /**
+   * @api private
+   */
+  setupRequest: function setupRequest(request) {
+    var self = this;
+    var translator = self.getTranslator();
+    var operation = request.operation;
+    var inputShape = request.service.api.operations[operation].input;
+    request._events.validate.unshift(function(req) {
+      req.rawParams = AWS.util.copy(req.params);
+      req.params = translator.translateInput(req.rawParams, inputShape);
+    });
+  },
+
+  /**
+   * @api private
+   */
+  setupResponse: function setupResponse(request) {
+    var self = this;
+    var translator = self.getTranslator();
+    var outputShape = self.service.api.operations[request.operation].output;
+    request.on('extractData', function(response) {
+      response.data = translator.translateOutput(response.data, outputShape);
+    });
+
+    var response = request.response;
+    response.nextPage = function(cb) {
+      var resp = this;
+      var req = resp.request;
+      var config;
+      var service = req.service;
+      var operation = req.operation;
+      try {
+        config = service.paginationConfig(operation, true);
+      } catch (e) { resp.error = e; }
+
+      if (!resp.hasNextPage()) {
+        if (cb) cb(resp.error, null);
+        else if (resp.error) throw resp.error;
+        return null;
+      }
+
+      var params = AWS.util.copy(req.rawParams);
+      if (!resp.nextPageTokens) {
+        return cb ? cb(null, null) : null;
+      } else {
+        var inputTokens = config.inputToken;
+        if (typeof inputTokens === 'string') inputTokens = [inputTokens];
+        for (var i = 0; i < inputTokens.length; i++) {
+          params[inputTokens[i]] = resp.nextPageTokens[i];
+        }
+        return self[operation](params, cb);
+      }
+    };
+  }
+
+});
+
+/**
+ * @api private
+ */
+module.exports = AWS.DynamoDB.DocumentClient;
+
+
+/***/ }),
+
+/***/ 91593:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var util = (__nccwpck_require__(28437).util);
+
+/**
+ * An object recognizable as a numeric value that stores the underlying number
+ * as a string.
+ *
+ * Intended to be a deserialization target for the DynamoDB Document Client when
+ * the `wrapNumbers` flag is set. This allows for numeric values that lose
+ * precision when converted to JavaScript's `number` type.
+ */
+var DynamoDBNumberValue = util.inherit({
+  constructor: function NumberValue(value) {
+    this.wrapperName = 'NumberValue';
+    this.value = value.toString();
+  },
+
+  /**
+   * Render the underlying value as a number when converting to JSON.
+   */
+  toJSON: function () {
+    return this.toNumber();
+  },
+
+  /**
+   * Convert the underlying value to a JavaScript number.
+   */
+  toNumber: function () {
+    return Number(this.value);
+  },
+
+  /**
+   * Return a string representing the unaltered value provided to the
+   * constructor.
+ */ + toString: function () { + return this.value; + } +}); + +/** + * @api private + */ +module.exports = DynamoDBNumberValue; + + +/***/ }), + +/***/ 20304: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = (__nccwpck_require__(28437).util); +var typeOf = (__nccwpck_require__(48084).typeOf); + +/** + * @api private + */ +var memberTypeToSetType = { + 'String': 'String', + 'Number': 'Number', + 'NumberValue': 'Number', + 'Binary': 'Binary' +}; + +/** + * @api private + */ +var DynamoDBSet = util.inherit({ + + constructor: function Set(list, options) { + options = options || {}; + this.wrapperName = 'Set'; + this.initialize(list, options.validate); + }, + + initialize: function(list, validate) { + var self = this; + self.values = [].concat(list); + self.detectType(); + if (validate) { + self.validate(); + } + }, + + detectType: function() { + this.type = memberTypeToSetType[typeOf(this.values[0])]; + if (!this.type) { + throw util.error(new Error(), { + code: 'InvalidSetType', + message: 'Sets can contain string, number, or binary values' + }); + } + }, + + validate: function() { + var self = this; + var length = self.values.length; + var values = self.values; + for (var i = 0; i < length; i++) { + if (memberTypeToSetType[typeOf(values[i])] !== self.type) { + throw util.error(new Error(), { + code: 'InvalidType', + message: self.type + ' Set contains ' + typeOf(values[i]) + ' value' + }); + } + } + }, + + /** + * Render the underlying values only when converting to JSON. + */ + toJSON: function() { + var self = this; + return self.values; + } + +}); + +/** + * @api private + */ +module.exports = DynamoDBSet; + + +/***/ }), + +/***/ 34222: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = (__nccwpck_require__(28437).util); +var convert = __nccwpck_require__(76663); + +var Translator = function(options) { + options = options || {}; + this.attrValue = options.attrValue; + this.convertEmptyValues = Boolean(options.convertEmptyValues); + this.wrapNumbers = Boolean(options.wrapNumbers); +}; + +Translator.prototype.translateInput = function(value, shape) { + this.mode = 'input'; + return this.translate(value, shape); +}; + +Translator.prototype.translateOutput = function(value, shape) { + this.mode = 'output'; + return this.translate(value, shape); +}; + +Translator.prototype.translate = function(value, shape) { + var self = this; + if (!shape || value === undefined) return undefined; + + if (shape.shape === self.attrValue) { + return convert[self.mode](value, { + convertEmptyValues: self.convertEmptyValues, + wrapNumbers: self.wrapNumbers, + }); + } + switch (shape.type) { + case 'structure': return self.translateStructure(value, shape); + case 'map': return self.translateMap(value, shape); + case 'list': return self.translateList(value, shape); + default: return self.translateScalar(value, shape); + } +}; + +Translator.prototype.translateStructure = function(structure, shape) { + var self = this; + if (structure == null) return undefined; + + var struct = {}; + util.each(structure, function(name, value) { + var memberShape = shape.members[name]; + if (memberShape) { + var result = self.translate(value, memberShape); + if (result !== undefined) struct[name] = result; + } + }); + return struct; +}; + +Translator.prototype.translateList = function(list, shape) { + var self = this; + if (list == null) return undefined; + + var out = []; + util.arrayEach(list, function(value) { + var result = self.translate(value, shape.member); + if 
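+    /* A member that translates to undefined (for example an undefined list
+       element) is pushed as null below, preserving list positions. */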
(result === undefined) out.push(null); + else out.push(result); + }); + return out; +}; + +Translator.prototype.translateMap = function(map, shape) { + var self = this; + if (map == null) return undefined; + + var out = {}; + util.each(map, function(key, value) { + var result = self.translate(value, shape.value); + if (result === undefined) out[key] = null; + else out[key] = result; + }); + return out; +}; + +Translator.prototype.translateScalar = function(value, shape) { + return shape.toType(value); +}; + +/** + * @api private + */ +module.exports = Translator; + + +/***/ }), + +/***/ 48084: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = (__nccwpck_require__(28437).util); + +function typeOf(data) { + if (data === null && typeof data === 'object') { + return 'null'; + } else if (data !== undefined && isBinary(data)) { + return 'Binary'; + } else if (data !== undefined && data.constructor) { + return data.wrapperName || util.typeName(data.constructor); + } else if (data !== undefined && typeof data === 'object') { + // this object is the result of Object.create(null), hence the absence of a + // defined constructor + return 'Object'; + } else { + return 'undefined'; + } +} + +function isBinary(data) { + var types = [ + 'Buffer', 'File', 'Blob', 'ArrayBuffer', 'DataView', + 'Int8Array', 'Uint8Array', 'Uint8ClampedArray', + 'Int16Array', 'Uint16Array', 'Int32Array', 'Uint32Array', + 'Float32Array', 'Float64Array' + ]; + if (util.isNode()) { + var Stream = util.stream.Stream; + if (util.Buffer.isBuffer(data) || data instanceof Stream) { + return true; + } + } + + for (var i = 0; i < types.length; i++) { + if (data !== undefined && data.constructor) { + if (util.isType(data, types[i])) return true; + if (util.typeName(data.constructor) === types[i]) return true; + } + } + + return false; +} + +/** + * @api private + */ +module.exports = { + typeOf: typeOf, + isBinary: isBinary +}; + + +/***/ }), + +/***/ 63727: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var eventMessageChunker = (__nccwpck_require__(73630).eventMessageChunker); +var parseEvent = (__nccwpck_require__(52123).parseEvent); + +function createEventStream(body, parser, model) { + var eventMessages = eventMessageChunker(body); + + var events = []; + + for (var i = 0; i < eventMessages.length; i++) { + events.push(parseEvent(parser, eventMessages[i], model)); + } + + return events; +} + +/** + * @api private + */ +module.exports = { + createEventStream: createEventStream +}; + + +/***/ }), + +/***/ 18518: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = (__nccwpck_require__(28437).util); +var Transform = (__nccwpck_require__(12781).Transform); +var allocBuffer = util.buffer.alloc; + +/** @type {Transform} */ +function EventMessageChunkerStream(options) { + Transform.call(this, options); + + this.currentMessageTotalLength = 0; + this.currentMessagePendingLength = 0; + /** @type {Buffer} */ + this.currentMessage = null; + + /** @type {Buffer} */ + this.messageLengthBuffer = null; +} + +EventMessageChunkerStream.prototype = Object.create(Transform.prototype); + +/** + * + * @param {Buffer} chunk + * @param {string} encoding + * @param {*} callback + */ +EventMessageChunkerStream.prototype._transform = function(chunk, encoding, callback) { + var chunkLength = chunk.length; + var currentOffset = 0; + + while (currentOffset < chunkLength) { + // create new message if necessary + if (!this.currentMessage) { + // working on a new message, 
determine total length + var bytesRemaining = chunkLength - currentOffset; + // prevent edge case where total length spans 2 chunks + if (!this.messageLengthBuffer) { + this.messageLengthBuffer = allocBuffer(4); + } + var numBytesForTotal = Math.min( + 4 - this.currentMessagePendingLength, // remaining bytes to fill the messageLengthBuffer + bytesRemaining // bytes left in chunk + ); + + chunk.copy( + this.messageLengthBuffer, + this.currentMessagePendingLength, + currentOffset, + currentOffset + numBytesForTotal + ); + + this.currentMessagePendingLength += numBytesForTotal; + currentOffset += numBytesForTotal; + + if (this.currentMessagePendingLength < 4) { + // not enough information to create the current message + break; + } + this.allocateMessage(this.messageLengthBuffer.readUInt32BE(0)); + this.messageLengthBuffer = null; + } + + // write data into current message + var numBytesToWrite = Math.min( + this.currentMessageTotalLength - this.currentMessagePendingLength, // number of bytes left to complete message + chunkLength - currentOffset // number of bytes left in the original chunk + ); + chunk.copy( + this.currentMessage, // target buffer + this.currentMessagePendingLength, // target offset + currentOffset, // chunk offset + currentOffset + numBytesToWrite // chunk end to write + ); + this.currentMessagePendingLength += numBytesToWrite; + currentOffset += numBytesToWrite; + + // check if a message is ready to be pushed + if (this.currentMessageTotalLength && this.currentMessageTotalLength === this.currentMessagePendingLength) { + // push out the message + this.push(this.currentMessage); + // cleanup + this.currentMessage = null; + this.currentMessageTotalLength = 0; + this.currentMessagePendingLength = 0; + } + } + + callback(); +}; + +EventMessageChunkerStream.prototype._flush = function(callback) { + if (this.currentMessageTotalLength) { + if (this.currentMessageTotalLength === this.currentMessagePendingLength) { + callback(null, this.currentMessage); + } else { + callback(new Error('Truncated event message received.')); + } + } else { + callback(); + } +}; + +/** + * @param {number} size Size of the message to be allocated. + * @api private + */ +EventMessageChunkerStream.prototype.allocateMessage = function(size) { + if (typeof size !== 'number') { + throw new Error('Attempted to allocate an event message where size was not a number: ' + size); + } + this.currentMessageTotalLength = size; + this.currentMessagePendingLength = 4; + this.currentMessage = allocBuffer(size); + this.currentMessage.writeUInt32BE(size, 0); +}; + +/** + * @api private + */ +module.exports = { + EventMessageChunkerStream: EventMessageChunkerStream +}; + + +/***/ }), + +/***/ 73630: +/***/ ((module) => { + +/** + * Takes in a buffer of event messages and splits them into individual messages. 
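+ * Framing note: each message begins with its total length as a big-endian
+ * 32-bit integer (read below via readInt32BE); see splitMessage later in
+ * this bundle for the full prelude/checksum layout.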
+ * @param {Buffer} buffer
+ * @api private
+ */
+function eventMessageChunker(buffer) {
+  /** @type Buffer[] */
+  var messages = [];
+  var offset = 0;
+
+  while (offset < buffer.length) {
+    var totalLength = buffer.readInt32BE(offset);
+
+    // create new buffer for individual message (shares memory with original)
+    var message = buffer.slice(offset, totalLength + offset);
+    // increment offset so it starts at the next message
+    offset += totalLength;
+
+    messages.push(message);
+  }
+
+  return messages;
+}
+
+/**
+ * @api private
+ */
+module.exports = {
+  eventMessageChunker: eventMessageChunker
+};
+
+
+/***/ }),
+
+/***/ 93773:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var Transform = (__nccwpck_require__(12781).Transform);
+var parseEvent = (__nccwpck_require__(52123).parseEvent);
+
+/** @type {Transform} */
+function EventUnmarshallerStream(options) {
+  options = options || {};
+  // set output to object mode
+  options.readableObjectMode = true;
+  Transform.call(this, options);
+  this._readableState.objectMode = true;
+
+  this.parser = options.parser;
+  this.eventStreamModel = options.eventStreamModel;
+}
+
+EventUnmarshallerStream.prototype = Object.create(Transform.prototype);
+
+/**
+ *
+ * @param {Buffer} chunk
+ * @param {string} encoding
+ * @param {*} callback
+ */
+EventUnmarshallerStream.prototype._transform = function(chunk, encoding, callback) {
+  try {
+    var event = parseEvent(this.parser, chunk, this.eventStreamModel);
+    this.push(event);
+    return callback();
+  } catch (err) {
+    callback(err);
+  }
+};
+
+/**
+ * @api private
+ */
+module.exports = {
+  EventUnmarshallerStream: EventUnmarshallerStream
+};
+
+
+/***/ }),
+
+/***/ 48583:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var util = (__nccwpck_require__(28437).util);
+var toBuffer = util.buffer.toBuffer;
+
+/**
+ * A lossless representation of a signed, 64-bit integer. Instances of this
+ * class may be used in arithmetic expressions as if they were numeric
+ * primitives, but the binary representation will be preserved unchanged as the
+ * `bytes` property of the object. The bytes should be encoded as big-endian,
+ * two's complement integers.
+ * @param {Buffer} bytes
+ *
+ * @api private
+ */
+function Int64(bytes) {
+  if (bytes.length !== 8) {
+    throw new Error('Int64 buffers must be exactly 8 bytes');
+  }
+  if (!util.Buffer.isBuffer(bytes)) bytes = toBuffer(bytes);
+
+  this.bytes = bytes;
+}
+
+/**
+ * @param {number} number
+ * @returns {Int64}
+ *
+ * @api private
+ */
+Int64.fromNumber = function(number) {
+  if (number > 9223372036854775807 || number < -9223372036854775808) {
+    throw new Error(
+      number + ' is too large (or, if negative, too small) to represent as an Int64'
+    );
+  }
+
+  var bytes = new Uint8Array(8);
+  for (
+    var i = 7, remaining = Math.abs(Math.round(number));
+    i > -1 && remaining > 0;
+    i--, remaining /= 256
+  ) {
+    bytes[i] = remaining;
+  }
+
+  if (number < 0) {
+    negate(bytes);
+  }
+
+  return new Int64(bytes);
+};
+
+/**
+ * @returns {number}
+ *
+ * @api private
+ */
+Int64.prototype.valueOf = function() {
+  var bytes = this.bytes.slice(0);
+  var negative = bytes[0] & 128;
+  if (negative) {
+    negate(bytes);
+  }
+
+  return parseInt(bytes.toString('hex'), 16) * (negative ?
-1 : 1); +}; + +Int64.prototype.toString = function() { + return String(this.valueOf()); +}; + +/** + * @param {Buffer} bytes + * + * @api private + */ +function negate(bytes) { + for (var i = 0; i < 8; i++) { + bytes[i] ^= 0xFF; + } + for (var i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) { + break; + } + } +} + +/** + * @api private + */ +module.exports = { + Int64: Int64 +}; + + +/***/ }), + +/***/ 52123: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var parseMessage = (__nccwpck_require__(30866).parseMessage); + +/** + * + * @param {*} parser + * @param {Buffer} message + * @param {*} shape + * @api private + */ +function parseEvent(parser, message, shape) { + var parsedMessage = parseMessage(message); + + // check if message is an event or error + var messageType = parsedMessage.headers[':message-type']; + if (messageType) { + if (messageType.value === 'error') { + throw parseError(parsedMessage); + } else if (messageType.value !== 'event') { + // not sure how to parse non-events/non-errors, ignore for now + return; + } + } + + // determine event type + var eventType = parsedMessage.headers[':event-type']; + // check that the event type is modeled + var eventModel = shape.members[eventType.value]; + if (!eventModel) { + return; + } + + var result = {}; + // check if an event payload exists + var eventPayloadMemberName = eventModel.eventPayloadMemberName; + if (eventPayloadMemberName) { + var payloadShape = eventModel.members[eventPayloadMemberName]; + // if the shape is binary, return the byte array + if (payloadShape.type === 'binary') { + result[eventPayloadMemberName] = parsedMessage.body; + } else { + result[eventPayloadMemberName] = parser.parse(parsedMessage.body.toString(), payloadShape); + } + } + + // read event headers + var eventHeaderNames = eventModel.eventHeaderMemberNames; + for (var i = 0; i < eventHeaderNames.length; i++) { + var name = eventHeaderNames[i]; + if (parsedMessage.headers[name]) { + // parse the header! 
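+      // (illustrative) a modeled 'timestamp' header, for example, arrives
+      // from parseHeaders as { type: 'timestamp', value: Date } and the
+      // member shape's toType() coerces that raw value for the result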
+ result[name] = eventModel.members[name].toType(parsedMessage.headers[name].value); + } + } + + var output = {}; + output[eventType.value] = result; + return output; +} + +function parseError(message) { + var errorCode = message.headers[':error-code']; + var errorMessage = message.headers[':error-message']; + var error = new Error(errorMessage.value || errorMessage); + error.code = error.name = errorCode.value || errorCode; + return error; +} + +/** + * @api private + */ +module.exports = { + parseEvent: parseEvent +}; + + +/***/ }), + +/***/ 30866: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var Int64 = (__nccwpck_require__(48583).Int64); + +var splitMessage = (__nccwpck_require__(71765).splitMessage); + +var BOOLEAN_TAG = 'boolean'; +var BYTE_TAG = 'byte'; +var SHORT_TAG = 'short'; +var INT_TAG = 'integer'; +var LONG_TAG = 'long'; +var BINARY_TAG = 'binary'; +var STRING_TAG = 'string'; +var TIMESTAMP_TAG = 'timestamp'; +var UUID_TAG = 'uuid'; + +/** + * @api private + * + * @param {Buffer} headers + */ +function parseHeaders(headers) { + var out = {}; + var position = 0; + while (position < headers.length) { + var nameLength = headers.readUInt8(position++); + var name = headers.slice(position, position + nameLength).toString(); + position += nameLength; + switch (headers.readUInt8(position++)) { + case 0 /* boolTrue */: + out[name] = { + type: BOOLEAN_TAG, + value: true + }; + break; + case 1 /* boolFalse */: + out[name] = { + type: BOOLEAN_TAG, + value: false + }; + break; + case 2 /* byte */: + out[name] = { + type: BYTE_TAG, + value: headers.readInt8(position++) + }; + break; + case 3 /* short */: + out[name] = { + type: SHORT_TAG, + value: headers.readInt16BE(position) + }; + position += 2; + break; + case 4 /* integer */: + out[name] = { + type: INT_TAG, + value: headers.readInt32BE(position) + }; + position += 4; + break; + case 5 /* long */: + out[name] = { + type: LONG_TAG, + value: new Int64(headers.slice(position, position + 8)) + }; + position += 8; + break; + case 6 /* byteArray */: + var binaryLength = headers.readUInt16BE(position); + position += 2; + out[name] = { + type: BINARY_TAG, + value: headers.slice(position, position + binaryLength) + }; + position += binaryLength; + break; + case 7 /* string */: + var stringLength = headers.readUInt16BE(position); + position += 2; + out[name] = { + type: STRING_TAG, + value: headers.slice( + position, + position + stringLength + ).toString() + }; + position += stringLength; + break; + case 8 /* timestamp */: + out[name] = { + type: TIMESTAMP_TAG, + value: new Date( + new Int64(headers.slice(position, position + 8)) + .valueOf() + ) + }; + position += 8; + break; + case 9 /* uuid */: + var uuidChars = headers.slice(position, position + 16) + .toString('hex'); + position += 16; + out[name] = { + type: UUID_TAG, + value: uuidChars.substr(0, 8) + '-' + + uuidChars.substr(8, 4) + '-' + + uuidChars.substr(12, 4) + '-' + + uuidChars.substr(16, 4) + '-' + + uuidChars.substr(20) + }; + break; + default: + throw new Error('Unrecognized header type tag'); + } + } + return out; +} + +function parseMessage(message) { + var parsed = splitMessage(message); + return { headers: parseHeaders(parsed.headers), body: parsed.body }; +} + +/** + * @api private + */ +module.exports = { + parseMessage: parseMessage +}; + + +/***/ }), + +/***/ 71765: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = (__nccwpck_require__(28437).util); +var toBuffer = util.buffer.toBuffer; + +// All prelude 
components are unsigned, 32-bit integers +var PRELUDE_MEMBER_LENGTH = 4; +// The prelude consists of two components +var PRELUDE_LENGTH = PRELUDE_MEMBER_LENGTH * 2; +// Checksums are always CRC32 hashes. +var CHECKSUM_LENGTH = 4; +// Messages must include a full prelude, a prelude checksum, and a message checksum +var MINIMUM_MESSAGE_LENGTH = PRELUDE_LENGTH + CHECKSUM_LENGTH * 2; + +/** + * @api private + * + * @param {Buffer} message + */ +function splitMessage(message) { + if (!util.Buffer.isBuffer(message)) message = toBuffer(message); + + if (message.length < MINIMUM_MESSAGE_LENGTH) { + throw new Error('Provided message too short to accommodate event stream message overhead'); + } + + if (message.length !== message.readUInt32BE(0)) { + throw new Error('Reported message length does not match received message length'); + } + + var expectedPreludeChecksum = message.readUInt32BE(PRELUDE_LENGTH); + + if ( + expectedPreludeChecksum !== util.crypto.crc32( + message.slice(0, PRELUDE_LENGTH) + ) + ) { + throw new Error( + 'The prelude checksum specified in the message (' + + expectedPreludeChecksum + + ') does not match the calculated CRC32 checksum.' + ); + } + + var expectedMessageChecksum = message.readUInt32BE(message.length - CHECKSUM_LENGTH); + + if ( + expectedMessageChecksum !== util.crypto.crc32( + message.slice(0, message.length - CHECKSUM_LENGTH) + ) + ) { + throw new Error( + 'The message checksum did not match the expected value of ' + + expectedMessageChecksum + ); + } + + var headersStart = PRELUDE_LENGTH + CHECKSUM_LENGTH; + var headersEnd = headersStart + message.readUInt32BE(PRELUDE_MEMBER_LENGTH); + + return { + headers: message.slice(headersStart, headersEnd), + body: message.slice(headersEnd, message.length - CHECKSUM_LENGTH), + }; +} + +/** + * @api private + */ +module.exports = { + splitMessage: splitMessage +}; + + +/***/ }), + +/***/ 69643: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * What is necessary to create an event stream in node? + * - http response stream + * - parser + * - event stream model + */ + +var EventMessageChunkerStream = (__nccwpck_require__(18518).EventMessageChunkerStream); +var EventUnmarshallerStream = (__nccwpck_require__(93773).EventUnmarshallerStream); + +function createEventStream(stream, parser, model) { + var eventStream = new EventUnmarshallerStream({ + parser: parser, + eventStreamModel: model + }); + + var eventMessageChunker = new EventMessageChunkerStream(); + + stream.pipe( + eventMessageChunker + ).pipe(eventStream); + + stream.on('error', function(err) { + eventMessageChunker.emit('error', err); + }); + + eventMessageChunker.on('error', function(err) { + eventStream.emit('error', err); + }); + + return eventStream; +} + +/** + * @api private + */ +module.exports = { + createEventStream: createEventStream +}; + + +/***/ }), + +/***/ 54995: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var SequentialExecutor = __nccwpck_require__(55948); +var DISCOVER_ENDPOINT = (__nccwpck_require__(45313).discoverEndpoint); +/** + * The namespace used to register global event listeners for request building + * and sending. + */ +AWS.EventListeners = { + /** + * @!attribute VALIDATE_CREDENTIALS + * A request listener that validates whether the request is being + * sent with credentials. 
+ *     Handles the {AWS.Request~validate 'validate' Request event}
+ *     @example Sending a request without validating credentials
+ *       var listener = AWS.EventListeners.Core.VALIDATE_CREDENTIALS;
+ *       request.removeListener('validate', listener);
+ *     @readonly
+ *     @return [Function]
+ *   @!attribute VALIDATE_REGION
+ *     A request listener that validates whether the region is set
+ *     for a request.
+ *     Handles the {AWS.Request~validate 'validate' Request event}
+ *     @example Sending a request without validating region configuration
+ *       var listener = AWS.EventListeners.Core.VALIDATE_REGION;
+ *       request.removeListener('validate', listener);
+ *     @readonly
+ *     @return [Function]
+ *   @!attribute VALIDATE_PARAMETERS
+ *     A request listener that validates input parameters in a request.
+ *     Handles the {AWS.Request~validate 'validate' Request event}
+ *     @example Sending a request without validating parameters
+ *       var listener = AWS.EventListeners.Core.VALIDATE_PARAMETERS;
+ *       request.removeListener('validate', listener);
+ *     @example Disable parameter validation globally
+ *       AWS.EventListeners.Core.removeListener('validate',
+ *         AWS.EventListeners.Core.VALIDATE_PARAMETERS);
+ *     @readonly
+ *     @return [Function]
+ *   @!attribute SEND
+ *     A request listener that initiates the HTTP connection for a
+ *     request being sent. Handles the {AWS.Request~send 'send' Request event}
+ *     @example Replacing the HTTP handler
+ *       var listener = AWS.EventListeners.Core.SEND;
+ *       request.removeListener('send', listener);
+ *       request.on('send', function(response) {
+ *         customHandler.send(response);
+ *       });
+ *     @return [Function]
+ *     @readonly
+ *   @!attribute HTTP_DATA
+ *     A request listener that reads data from the HTTP connection in order
+ *     to build the response data.
+ *     Handles the {AWS.Request~httpData 'httpData' Request event}.
+ *     Remove this handler if you are overriding the 'httpData' event and
+ *     do not want extra data processing and buffering overhead.
+ *     @example Disabling default data processing
+ *       var listener = AWS.EventListeners.Core.HTTP_DATA;
+ *       request.removeListener('httpData', listener);
+ *     @return [Function]
+ *     @readonly
+ */
+  Core: {} /* doc hack */
+};
+
+/**
+ * @api private
+ */
+function getOperationAuthtype(req) {
+  if (!req.service.api.operations) {
+    return '';
+  }
+  var operation = req.service.api.operations[req.operation];
+  return operation ?
operation.authtype : ''; +} + +/** + * @api private + */ +function getIdentityType(req) { + var service = req.service; + + if (service.config.signatureVersion) { + return service.config.signatureVersion; + } + + if (service.api.signatureVersion) { + return service.api.signatureVersion; + } + + return getOperationAuthtype(req); +} + +AWS.EventListeners = { + Core: new SequentialExecutor().addNamedListeners(function(add, addAsync) { + addAsync( + 'VALIDATE_CREDENTIALS', 'validate', + function VALIDATE_CREDENTIALS(req, done) { + if (!req.service.api.signatureVersion && !req.service.config.signatureVersion) return done(); // none + + var identityType = getIdentityType(req); + if (identityType === 'bearer') { + req.service.config.getToken(function(err) { + if (err) { + req.response.error = AWS.util.error(err, {code: 'TokenError'}); + } + done(); + }); + return; + } + + req.service.config.getCredentials(function(err) { + if (err) { + req.response.error = AWS.util.error(err, + { + code: 'CredentialsError', + message: 'Missing credentials in config, if using AWS_CONFIG_FILE, set AWS_SDK_LOAD_CONFIG=1' + } + ); + } + done(); + }); + }); + + add('VALIDATE_REGION', 'validate', function VALIDATE_REGION(req) { + if (!req.service.isGlobalEndpoint) { + var dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!req.service.config.region) { + req.response.error = AWS.util.error(new Error(), + {code: 'ConfigError', message: 'Missing region in config'}); + } else if (!dnsHostRegex.test(req.service.config.region)) { + req.response.error = AWS.util.error(new Error(), + {code: 'ConfigError', message: 'Invalid region in config'}); + } + } + }); + + add('BUILD_IDEMPOTENCY_TOKENS', 'validate', function BUILD_IDEMPOTENCY_TOKENS(req) { + if (!req.service.api.operations) { + return; + } + var operation = req.service.api.operations[req.operation]; + if (!operation) { + return; + } + var idempotentMembers = operation.idempotentMembers; + if (!idempotentMembers.length) { + return; + } + // creates a copy of params so user's param object isn't mutated + var params = AWS.util.copy(req.params); + for (var i = 0, iLen = idempotentMembers.length; i < iLen; i++) { + if (!params[idempotentMembers[i]]) { + // add the member + params[idempotentMembers[i]] = AWS.util.uuid.v4(); + } + } + req.params = params; + }); + + add('VALIDATE_PARAMETERS', 'validate', function VALIDATE_PARAMETERS(req) { + if (!req.service.api.operations) { + return; + } + var rules = req.service.api.operations[req.operation].input; + var validation = req.service.config.paramValidation; + new AWS.ParamValidator(validation).validate(rules, req.params); + }); + + add('COMPUTE_CHECKSUM', 'afterBuild', function COMPUTE_CHECKSUM(req) { + if (!req.service.api.operations) { + return; + } + var operation = req.service.api.operations[req.operation]; + if (!operation) { + return; + } + var body = req.httpRequest.body; + var isNonStreamingPayload = body && (AWS.util.Buffer.isBuffer(body) || typeof body === 'string'); + var headers = req.httpRequest.headers; + if ( + operation.httpChecksumRequired && + req.service.config.computeChecksums && + isNonStreamingPayload && + !headers['Content-MD5'] + ) { + var md5 = AWS.util.crypto.md5(body, 'base64'); + headers['Content-MD5'] = md5; + } + }); + + addAsync('COMPUTE_SHA256', 'afterBuild', function COMPUTE_SHA256(req, done) { + req.haltHandlersOnError(); + if (!req.service.api.operations) { + return; + } + var operation = req.service.api.operations[req.operation]; + var authtype = operation 
? operation.authtype : ''; + if (!req.service.api.signatureVersion && !authtype && !req.service.config.signatureVersion) return done(); // none + if (req.service.getSignerClass(req) === AWS.Signers.V4) { + var body = req.httpRequest.body || ''; + if (authtype.indexOf('unsigned-body') >= 0) { + req.httpRequest.headers['X-Amz-Content-Sha256'] = 'UNSIGNED-PAYLOAD'; + return done(); + } + AWS.util.computeSha256(body, function(err, sha) { + if (err) { + done(err); + } + else { + req.httpRequest.headers['X-Amz-Content-Sha256'] = sha; + done(); + } + }); + } else { + done(); + } + }); + + add('SET_CONTENT_LENGTH', 'afterBuild', function SET_CONTENT_LENGTH(req) { + var authtype = getOperationAuthtype(req); + var payloadMember = AWS.util.getRequestPayloadShape(req); + if (req.httpRequest.headers['Content-Length'] === undefined) { + try { + var length = AWS.util.string.byteLength(req.httpRequest.body); + req.httpRequest.headers['Content-Length'] = length; + } catch (err) { + if (payloadMember && payloadMember.isStreaming) { + if (payloadMember.requiresLength) { + //streaming payload requires length(s3, glacier) + throw err; + } else if (authtype.indexOf('unsigned-body') >= 0) { + //unbounded streaming payload(lex, mediastore) + req.httpRequest.headers['Transfer-Encoding'] = 'chunked'; + return; + } else { + throw err; + } + } + throw err; + } + } + }); + + add('SET_HTTP_HOST', 'afterBuild', function SET_HTTP_HOST(req) { + req.httpRequest.headers['Host'] = req.httpRequest.endpoint.host; + }); + + add('SET_TRACE_ID', 'afterBuild', function SET_TRACE_ID(req) { + var traceIdHeaderName = 'X-Amzn-Trace-Id'; + if (AWS.util.isNode() && !Object.hasOwnProperty.call(req.httpRequest.headers, traceIdHeaderName)) { + var ENV_LAMBDA_FUNCTION_NAME = 'AWS_LAMBDA_FUNCTION_NAME'; + var ENV_TRACE_ID = '_X_AMZN_TRACE_ID'; + var functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + var traceId = process.env[ENV_TRACE_ID]; + if ( + typeof functionName === 'string' && + functionName.length > 0 && + typeof traceId === 'string' && + traceId.length > 0 + ) { + req.httpRequest.headers[traceIdHeaderName] = traceId; + } + } + }); + + add('RESTART', 'restart', function RESTART() { + var err = this.response.error; + if (!err || !err.retryable) return; + + this.httpRequest = new AWS.HttpRequest( + this.service.endpoint, + this.service.region + ); + + if (this.response.retryCount < this.service.config.maxRetries) { + this.response.retryCount++; + } else { + this.response.error = null; + } + }); + + var addToHead = true; + addAsync('DISCOVER_ENDPOINT', 'sign', DISCOVER_ENDPOINT, addToHead); + + addAsync('SIGN', 'sign', function SIGN(req, done) { + var service = req.service; + var identityType = getIdentityType(req); + if (!identityType || identityType.length === 0) return done(); // none + + if (identityType === 'bearer') { + service.config.getToken(function (err, token) { + if (err) { + req.response.error = err; + return done(); + } + + try { + var SignerClass = service.getSignerClass(req); + var signer = new SignerClass(req.httpRequest); + signer.addAuthorization(token); + } catch (e) { + req.response.error = e; + } + done(); + }); + } else { + service.config.getCredentials(function (err, credentials) { + if (err) { + req.response.error = err; + return done(); + } + + try { + var date = service.getSkewCorrectedDate(); + var SignerClass = service.getSignerClass(req); + var operations = req.service.api.operations || {}; + var operation = operations[req.operation]; + var signer = new SignerClass(req.httpRequest, + 
service.getSigningName(req), + { + signatureCache: service.config.signatureCache, + operation: operation, + signatureVersion: service.api.signatureVersion + }); + signer.setServiceClientId(service._clientId); + + // clear old authorization headers + delete req.httpRequest.headers['Authorization']; + delete req.httpRequest.headers['Date']; + delete req.httpRequest.headers['X-Amz-Date']; + + // add new authorization + signer.addAuthorization(credentials, date); + req.signedAt = date; + } catch (e) { + req.response.error = e; + } + done(); + }); + + } + }); + + add('VALIDATE_RESPONSE', 'validateResponse', function VALIDATE_RESPONSE(resp) { + if (this.service.successfulResponse(resp, this)) { + resp.data = {}; + resp.error = null; + } else { + resp.data = null; + resp.error = AWS.util.error(new Error(), + {code: 'UnknownError', message: 'An unknown error occurred.'}); + } + }); + + add('ERROR', 'error', function ERROR(err, resp) { + var awsQueryCompatible = resp.request.service.api.awsQueryCompatible; + if (awsQueryCompatible) { + var headers = resp.httpResponse.headers; + var queryErrorCode = headers ? headers['x-amzn-query-error'] : undefined; + if (queryErrorCode && queryErrorCode.includes(';')) { + resp.error.code = queryErrorCode.split(';')[0]; + } + } + }, true); + + addAsync('SEND', 'send', function SEND(resp, done) { + resp.httpResponse._abortCallback = done; + resp.error = null; + resp.data = null; + + function callback(httpResp) { + resp.httpResponse.stream = httpResp; + var stream = resp.request.httpRequest.stream; + var service = resp.request.service; + var api = service.api; + var operationName = resp.request.operation; + var operation = api.operations[operationName] || {}; + + httpResp.on('headers', function onHeaders(statusCode, headers, statusMessage) { + resp.request.emit( + 'httpHeaders', + [statusCode, headers, resp, statusMessage] + ); + + if (!resp.httpResponse.streaming) { + if (AWS.HttpClient.streamsApiVersion === 2) { // streams2 API check + // if we detect event streams, we're going to have to + // return the stream immediately + if (operation.hasEventOutput && service.successfulResponse(resp)) { + // skip reading the IncomingStream + resp.request.emit('httpDone'); + done(); + return; + } + + httpResp.on('readable', function onReadable() { + var data = httpResp.read(); + if (data !== null) { + resp.request.emit('httpData', [data, resp]); + } + }); + } else { // legacy streams API + httpResp.on('data', function onData(data) { + resp.request.emit('httpData', [data, resp]); + }); + } + } + }); + + httpResp.on('end', function onEnd() { + if (!stream || !stream.didCallback) { + if (AWS.HttpClient.streamsApiVersion === 2 && (operation.hasEventOutput && service.successfulResponse(resp))) { + // don't concatenate response chunks when streaming event stream data when response is successful + return; + } + resp.request.emit('httpDone'); + done(); + } + }); + } + + function progress(httpResp) { + httpResp.on('sendProgress', function onSendProgress(value) { + resp.request.emit('httpUploadProgress', [value, resp]); + }); + + httpResp.on('receiveProgress', function onReceiveProgress(value) { + resp.request.emit('httpDownloadProgress', [value, resp]); + }); + } + + function error(err) { + if (err.code !== 'RequestAbortedError') { + var errCode = err.code === 'TimeoutError' ? 
err.code : 'NetworkingError'; + err = AWS.util.error(err, { + code: errCode, + region: resp.request.httpRequest.region, + hostname: resp.request.httpRequest.endpoint.hostname, + retryable: true + }); + } + resp.error = err; + resp.request.emit('httpError', [resp.error, resp], function() { + done(); + }); + } + + function executeSend() { + var http = AWS.HttpClient.getInstance(); + var httpOptions = resp.request.service.config.httpOptions || {}; + try { + var stream = http.handleRequest(resp.request.httpRequest, httpOptions, + callback, error); + progress(stream); + } catch (err) { + error(err); + } + } + var timeDiff = (resp.request.service.getSkewCorrectedDate() - this.signedAt) / 1000; + if (timeDiff >= 60 * 10) { // if we signed 10min ago, re-sign + this.emit('sign', [this], function(err) { + if (err) done(err); + else executeSend(); + }); + } else { + executeSend(); + } + }); + + add('HTTP_HEADERS', 'httpHeaders', + function HTTP_HEADERS(statusCode, headers, resp, statusMessage) { + resp.httpResponse.statusCode = statusCode; + resp.httpResponse.statusMessage = statusMessage; + resp.httpResponse.headers = headers; + resp.httpResponse.body = AWS.util.buffer.toBuffer(''); + resp.httpResponse.buffers = []; + resp.httpResponse.numBytes = 0; + var dateHeader = headers.date || headers.Date; + var service = resp.request.service; + if (dateHeader) { + var serverTime = Date.parse(dateHeader); + if (service.config.correctClockSkew + && service.isClockSkewed(serverTime)) { + service.applyClockOffset(serverTime); + } + } + }); + + add('HTTP_DATA', 'httpData', function HTTP_DATA(chunk, resp) { + if (chunk) { + if (AWS.util.isNode()) { + resp.httpResponse.numBytes += chunk.length; + + var total = resp.httpResponse.headers['content-length']; + var progress = { loaded: resp.httpResponse.numBytes, total: total }; + resp.request.emit('httpDownloadProgress', [progress, resp]); + } + + resp.httpResponse.buffers.push(AWS.util.buffer.toBuffer(chunk)); + } + }); + + add('HTTP_DONE', 'httpDone', function HTTP_DONE(resp) { + // convert buffers array into single buffer + if (resp.httpResponse.buffers && resp.httpResponse.buffers.length > 0) { + var body = AWS.util.buffer.concat(resp.httpResponse.buffers); + resp.httpResponse.body = body; + } + delete resp.httpResponse.numBytes; + delete resp.httpResponse.buffers; + }); + + add('FINALIZE_ERROR', 'retry', function FINALIZE_ERROR(resp) { + if (resp.httpResponse.statusCode) { + resp.error.statusCode = resp.httpResponse.statusCode; + if (resp.error.retryable === undefined) { + resp.error.retryable = this.service.retryableError(resp.error, this); + } + } + }); + + add('INVALIDATE_CREDENTIALS', 'retry', function INVALIDATE_CREDENTIALS(resp) { + if (!resp.error) return; + switch (resp.error.code) { + case 'RequestExpired': // EC2 only + case 'ExpiredTokenException': + case 'ExpiredToken': + resp.error.retryable = true; + resp.request.service.config.credentials.expired = true; + } + }); + + add('EXPIRED_SIGNATURE', 'retry', function EXPIRED_SIGNATURE(resp) { + var err = resp.error; + if (!err) return; + if (typeof err.code === 'string' && typeof err.message === 'string') { + if (err.code.match(/Signature/) && err.message.match(/expired/)) { + resp.error.retryable = true; + } + } + }); + + add('CLOCK_SKEWED', 'retry', function CLOCK_SKEWED(resp) { + if (!resp.error) return; + if (this.service.clockSkewError(resp.error) + && this.service.config.correctClockSkew) { + resp.error.retryable = true; + } + }); + + add('REDIRECT', 'retry', function REDIRECT(resp) { + if 
(resp.error && resp.error.statusCode >= 300 &&
+        resp.error.statusCode < 400 && resp.httpResponse.headers['location']) {
+      this.httpRequest.endpoint =
+        new AWS.Endpoint(resp.httpResponse.headers['location']);
+      this.httpRequest.headers['Host'] = this.httpRequest.endpoint.host;
+      resp.error.redirect = true;
+      resp.error.retryable = true;
+    }
+  });
+
+  add('RETRY_CHECK', 'retry', function RETRY_CHECK(resp) {
+    if (resp.error) {
+      if (resp.error.redirect && resp.redirectCount < resp.maxRedirects) {
+        resp.error.retryDelay = 0;
+      } else if (resp.retryCount < resp.maxRetries) {
+        resp.error.retryDelay = this.service.retryDelays(resp.retryCount, resp.error) || 0;
+      }
+    }
+  });
+
+  addAsync('RESET_RETRY_STATE', 'afterRetry', function RESET_RETRY_STATE(resp, done) {
+    var delay, willRetry = false;
+
+    if (resp.error) {
+      delay = resp.error.retryDelay || 0;
+      if (resp.error.retryable && resp.retryCount < resp.maxRetries) {
+        resp.retryCount++;
+        willRetry = true;
+      } else if (resp.error.redirect && resp.redirectCount < resp.maxRedirects) {
+        resp.redirectCount++;
+        willRetry = true;
+      }
+    }
+
+    // delay < 0 is a signal from customBackoff to skip retries
+    if (willRetry && delay >= 0) {
+      resp.error = null;
+      setTimeout(done, delay);
+    } else {
+      done();
+    }
+  });
+  }),
+
+  CorePost: new SequentialExecutor().addNamedListeners(function(add) {
+    add('EXTRACT_REQUEST_ID', 'extractData', AWS.util.extractRequestId);
+    add('EXTRACT_REQUEST_ID', 'extractError', AWS.util.extractRequestId);
+
+    add('ENOTFOUND_ERROR', 'httpError', function ENOTFOUND_ERROR(err) {
+      function isDNSError(err) {
+        // map numeric errnos (node >= 12) back to their names before checking
+        return err.errno === 'ENOTFOUND' ||
+          typeof err.errno === 'number' &&
+          typeof AWS.util.getSystemErrorName === 'function' &&
+          ['EAI_NONAME', 'EAI_NODATA'].indexOf(AWS.util.getSystemErrorName(err.errno)) >= 0;
+      }
+      if (err.code === 'NetworkingError' && isDNSError(err)) {
+        var message = 'Inaccessible host: `' + err.hostname + '\' at port `' + err.port +
+          '\'. This service may not be available in the `' + err.region +
+          '\' region.';
+        this.response.error = AWS.util.error(new Error(message), {
+          code: 'UnknownEndpoint',
+          region: err.region,
+          hostname: err.hostname,
+          retryable: true,
+          originalError: err
+        });
+      }
+    });
+  }),
+
+  Logger: new SequentialExecutor().addNamedListeners(function(add) {
+    add('LOG_REQUEST', 'complete', function LOG_REQUEST(resp) {
+      var req = resp.request;
+      var logger = req.service.config.logger;
+      if (!logger) return;
+      function filterSensitiveLog(inputShape, shape) {
+        if (!shape) {
+          return shape;
+        }
+        if (inputShape.isSensitive) {
+          return '***SensitiveInformation***';
+        }
+        switch (inputShape.type) {
+          case 'structure':
+            var struct = {};
+            AWS.util.each(shape, function(subShapeName, subShape) {
+              if (Object.prototype.hasOwnProperty.call(inputShape.members, subShapeName)) {
+                struct[subShapeName] = filterSensitiveLog(inputShape.members[subShapeName], subShape);
+              } else {
+                struct[subShapeName] = subShape;
+              }
+            });
+            return struct;
+          case 'list':
+            var list = [];
+            AWS.util.arrayEach(shape, function(subShape, index) {
+              list.push(filterSensitiveLog(inputShape.member, subShape));
+            });
+            return list;
+          case 'map':
+            var map = {};
+            AWS.util.each(shape, function(key, value) {
+              map[key] = filterSensitiveLog(inputShape.value, value);
+            });
+            return map;
+          default:
+            return shape;
+        }
+      }
+
+      function buildMessage() {
+        var time = resp.request.service.getSkewCorrectedDate().getTime();
+        var delta = (time - req.startTime.getTime()) / 1000;
+        var ansi = logger.isTTY ?
true : false; + var status = resp.httpResponse.statusCode; + var censoredParams = req.params; + if ( + req.service.api.operations && + req.service.api.operations[req.operation] && + req.service.api.operations[req.operation].input + ) { + var inputShape = req.service.api.operations[req.operation].input; + censoredParams = filterSensitiveLog(inputShape, req.params); + } + var params = (__nccwpck_require__(73837).inspect)(censoredParams, true, null); + var message = ''; + if (ansi) message += '\x1B[33m'; + message += '[AWS ' + req.service.serviceIdentifier + ' ' + status; + message += ' ' + delta.toString() + 's ' + resp.retryCount + ' retries]'; + if (ansi) message += '\x1B[0;1m'; + message += ' ' + AWS.util.string.lowerFirst(req.operation); + message += '(' + params + ')'; + if (ansi) message += '\x1B[0m'; + return message; + } + + var line = buildMessage(); + if (typeof logger.log === 'function') { + logger.log(line); + } else if (typeof logger.write === 'function') { + logger.write(line + '\n'); + } + }); + }), + + Json: new SequentialExecutor().addNamedListeners(function(add) { + var svc = __nccwpck_require__(30083); + add('BUILD', 'build', svc.buildRequest); + add('EXTRACT_DATA', 'extractData', svc.extractData); + add('EXTRACT_ERROR', 'extractError', svc.extractError); + }), + + Rest: new SequentialExecutor().addNamedListeners(function(add) { + var svc = __nccwpck_require__(98200); + add('BUILD', 'build', svc.buildRequest); + add('EXTRACT_DATA', 'extractData', svc.extractData); + add('EXTRACT_ERROR', 'extractError', svc.extractError); + }), + + RestJson: new SequentialExecutor().addNamedListeners(function(add) { + var svc = __nccwpck_require__(5883); + add('BUILD', 'build', svc.buildRequest); + add('EXTRACT_DATA', 'extractData', svc.extractData); + add('EXTRACT_ERROR', 'extractError', svc.extractError); + add('UNSET_CONTENT_LENGTH', 'afterBuild', svc.unsetContentLength); + }), + + RestXml: new SequentialExecutor().addNamedListeners(function(add) { + var svc = __nccwpck_require__(15143); + add('BUILD', 'build', svc.buildRequest); + add('EXTRACT_DATA', 'extractData', svc.extractData); + add('EXTRACT_ERROR', 'extractError', svc.extractError); + }), + + Query: new SequentialExecutor().addNamedListeners(function(add) { + var svc = __nccwpck_require__(90761); + add('BUILD', 'build', svc.buildRequest); + add('EXTRACT_DATA', 'extractData', svc.extractData); + add('EXTRACT_ERROR', 'extractError', svc.extractError); + }) +}; + + +/***/ }), + +/***/ 1556: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var inherit = AWS.util.inherit; + +/** + * The endpoint that a service will talk to, for example, + * `'https://ec2.ap-southeast-1.amazonaws.com'`. If + * you need to override an endpoint for a service, you can + * set the endpoint on a service by passing the endpoint + * object with the `endpoint` option key: + * + * ```javascript + * var ep = new AWS.Endpoint('awsproxy.example.com'); + * var s3 = new AWS.S3({endpoint: ep}); + * s3.service.endpoint.hostname == 'awsproxy.example.com' + * ``` + * + * Note that if you do not specify a protocol, the protocol will + * be selected based on your current {AWS.config} configuration. 
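+ *
+ * As an illustrative sketch (the hostname here is just an example, not
+ * from the original docs), omitting the protocol defers to the current
+ * configuration:
+ *
+ * ```javascript
+ * AWS.config.update({sslEnabled: true}); // the default
+ * var ep = new AWS.Endpoint('dynamodb.us-east-1.amazonaws.com');
+ * ep.protocol; // => 'https:'
+ * ep.port; // => 443
+ * ```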
+ * + * @!attribute protocol + * @return [String] the protocol (http or https) of the endpoint + * URL + * @!attribute hostname + * @return [String] the host portion of the endpoint, e.g., + * example.com + * @!attribute host + * @return [String] the host portion of the endpoint including + * the port, e.g., example.com:80 + * @!attribute port + * @return [Integer] the port of the endpoint + * @!attribute href + * @return [String] the full URL of the endpoint + */ +AWS.Endpoint = inherit({ + + /** + * @overload Endpoint(endpoint) + * Constructs a new endpoint given an endpoint URL. If the + * URL omits a protocol (http or https), the default protocol + * set in the global {AWS.config} will be used. + * @param endpoint [String] the URL to construct an endpoint from + */ + constructor: function Endpoint(endpoint, config) { + AWS.util.hideProperties(this, ['slashes', 'auth', 'hash', 'search', 'query']); + + if (typeof endpoint === 'undefined' || endpoint === null) { + throw new Error('Invalid endpoint: ' + endpoint); + } else if (typeof endpoint !== 'string') { + return AWS.util.copy(endpoint); + } + + if (!endpoint.match(/^http/)) { + var useSSL = config && config.sslEnabled !== undefined ? + config.sslEnabled : AWS.config.sslEnabled; + endpoint = (useSSL ? 'https' : 'http') + '://' + endpoint; + } + + AWS.util.update(this, AWS.util.urlParse(endpoint)); + + // Ensure the port property is set as an integer + if (this.port) { + this.port = parseInt(this.port, 10); + } else { + this.port = this.protocol === 'https:' ? 443 : 80; + } + } + +}); + +/** + * The low level HTTP request object, encapsulating all HTTP header + * and body data sent by a service request. + * + * @!attribute method + * @return [String] the HTTP method of the request + * @!attribute path + * @return [String] the path portion of the URI, e.g., + * "/list/?start=5&num=10" + * @!attribute headers + * @return [map] + * a map of header keys and their respective values + * @!attribute body + * @return [String] the request body payload + * @!attribute endpoint + * @return [AWS.Endpoint] the endpoint for the request + * @!attribute region + * @api private + * @return [String] the region, for signing purposes only. + */ +AWS.HttpRequest = inherit({ + + /** + * @api private + */ + constructor: function HttpRequest(endpoint, region) { + endpoint = new AWS.Endpoint(endpoint); + this.method = 'POST'; + this.path = endpoint.path || '/'; + this.headers = {}; + this.body = ''; + this.endpoint = endpoint; + this.region = region; + this._userAgent = ''; + this.setUserAgent(); + }, + + /** + * @api private + */ + setUserAgent: function setUserAgent() { + this._userAgent = this.headers[this.getUserAgentHeaderName()] = AWS.util.userAgent(); + }, + + getUserAgentHeaderName: function getUserAgentHeaderName() { + var prefix = AWS.util.isBrowser() ? 
'X-Amz-' : ''; + return prefix + 'User-Agent'; + }, + + /** + * @api private + */ + appendToUserAgent: function appendToUserAgent(agentPartial) { + if (typeof agentPartial === 'string' && agentPartial) { + this._userAgent += ' ' + agentPartial; + } + this.headers[this.getUserAgentHeaderName()] = this._userAgent; + }, + + /** + * @api private + */ + getUserAgent: function getUserAgent() { + return this._userAgent; + }, + + /** + * @return [String] the part of the {path} excluding the + * query string + */ + pathname: function pathname() { + return this.path.split('?', 1)[0]; + }, + + /** + * @return [String] the query string portion of the {path} + */ + search: function search() { + var query = this.path.split('?', 2)[1]; + if (query) { + query = AWS.util.queryStringParse(query); + return AWS.util.queryParamsToString(query); + } + return ''; + }, + + /** + * @api private + * update httpRequest endpoint with endpoint string + */ + updateEndpoint: function updateEndpoint(endpointStr) { + var newEndpoint = new AWS.Endpoint(endpointStr); + this.endpoint = newEndpoint; + this.path = newEndpoint.path || '/'; + if (this.headers['Host']) { + this.headers['Host'] = newEndpoint.host; + } + } +}); + +/** + * The low level HTTP response object, encapsulating all HTTP header + * and body data returned from the request. + * + * @!attribute statusCode + * @return [Integer] the HTTP status code of the response (e.g., 200, 404) + * @!attribute headers + * @return [map] + * a map of response header keys and their respective values + * @!attribute body + * @return [String] the response body payload + * @!attribute [r] streaming + * @return [Boolean] whether this response is being streamed at a low-level. + * Defaults to `false` (buffered reads). Do not modify this manually, use + * {createUnbufferedStream} to convert the stream to unbuffered mode + * instead. + */ +AWS.HttpResponse = inherit({ + + /** + * @api private + */ + constructor: function HttpResponse() { + this.statusCode = undefined; + this.headers = {}; + this.body = undefined; + this.streaming = false; + this.stream = null; + }, + + /** + * Disables buffering on the HTTP response and returns the stream for reading. + * @return [Stream, XMLHttpRequest, null] the underlying stream object. + * Use this object to directly read data off of the stream. + * @note This object is only available after the {AWS.Request~httpHeaders} + * event has fired. This method must be called prior to + * {AWS.Request~httpData}. 
+ * @example Taking control of a stream + * request.on('httpHeaders', function(statusCode, headers) { + * if (statusCode < 300) { + * if (headers.etag === 'xyz') { + * // pipe the stream, disabling buffering + * var stream = this.response.httpResponse.createUnbufferedStream(); + * stream.pipe(process.stdout); + * } else { // abort this request and set a better error message + * this.abort(); + * this.response.error = new Error('Invalid ETag'); + * } + * } + * }).send(console.log); + */ + createUnbufferedStream: function createUnbufferedStream() { + this.streaming = true; + return this.stream; + } +}); + + +AWS.HttpClient = inherit({}); + +/** + * @api private + */ +AWS.HttpClient.getInstance = function getInstance() { + if (this.singleton === undefined) { + this.singleton = new this(); + } + return this.singleton; +}; + + +/***/ }), + +/***/ 2310: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var Stream = AWS.util.stream.Stream; +var TransformStream = AWS.util.stream.Transform; +var ReadableStream = AWS.util.stream.Readable; +__nccwpck_require__(1556); +var CONNECTION_REUSE_ENV_NAME = 'AWS_NODEJS_CONNECTION_REUSE_ENABLED'; + +/** + * @api private + */ +AWS.NodeHttpClient = AWS.util.inherit({ + handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) { + var self = this; + var endpoint = httpRequest.endpoint; + var pathPrefix = ''; + if (!httpOptions) httpOptions = {}; + if (httpOptions.proxy) { + pathPrefix = endpoint.protocol + '//' + endpoint.hostname; + if (endpoint.port !== 80 && endpoint.port !== 443) { + pathPrefix += ':' + endpoint.port; + } + endpoint = new AWS.Endpoint(httpOptions.proxy); + } + + var useSSL = endpoint.protocol === 'https:'; + var http = useSSL ? __nccwpck_require__(95687) : __nccwpck_require__(13685); + var options = { + host: endpoint.hostname, + port: endpoint.port, + method: httpRequest.method, + headers: httpRequest.headers, + path: pathPrefix + httpRequest.path + }; + + AWS.util.update(options, httpOptions); + + if (!httpOptions.agent) { + options.agent = this.getAgent(useSSL, { + keepAlive: process.env[CONNECTION_REUSE_ENV_NAME] === '1' ? 
true : false + }); + } + + delete options.proxy; // proxy isn't an HTTP option + delete options.timeout; // timeout isn't an HTTP option + + var stream = http.request(options, function (httpResp) { + if (stream.didCallback) return; + + callback(httpResp); + httpResp.emit( + 'headers', + httpResp.statusCode, + httpResp.headers, + httpResp.statusMessage + ); + }); + httpRequest.stream = stream; // attach stream to httpRequest + stream.didCallback = false; + + // connection timeout support + if (httpOptions.connectTimeout) { + var connectTimeoutId; + stream.on('socket', function(socket) { + if (socket.connecting) { + connectTimeoutId = setTimeout(function connectTimeout() { + if (stream.didCallback) return; stream.didCallback = true; + + stream.abort(); + errCallback(AWS.util.error( + new Error('Socket timed out without establishing a connection'), + {code: 'TimeoutError'} + )); + }, httpOptions.connectTimeout); + socket.on('connect', function() { + clearTimeout(connectTimeoutId); + connectTimeoutId = null; + }); + } + }); + } + + // timeout support + stream.setTimeout(httpOptions.timeout || 0, function() { + if (stream.didCallback) return; stream.didCallback = true; + + var msg = 'Connection timed out after ' + httpOptions.timeout + 'ms'; + errCallback(AWS.util.error(new Error(msg), {code: 'TimeoutError'})); + stream.abort(); + }); + + stream.on('error', function(err) { + if (connectTimeoutId) { + clearTimeout(connectTimeoutId); + connectTimeoutId = null; + } + if (stream.didCallback) return; stream.didCallback = true; + if ('ECONNRESET' === err.code || 'EPIPE' === err.code || 'ETIMEDOUT' === err.code) { + errCallback(AWS.util.error(err, {code: 'TimeoutError'})); + } else { + errCallback(err); + } + }); + + var expect = httpRequest.headers.Expect || httpRequest.headers.expect; + if (expect === '100-continue') { + stream.once('continue', function() { + self.writeBody(stream, httpRequest); + }); + } else { + this.writeBody(stream, httpRequest); + } + + return stream; + }, + + writeBody: function writeBody(stream, httpRequest) { + var body = httpRequest.body; + var totalBytes = parseInt(httpRequest.headers['Content-Length'], 10); + + if (body instanceof Stream) { + // For progress support of streaming content - + // pipe the data through a transform stream to emit 'sendProgress' events + var progressStream = this.progressStream(stream, totalBytes); + if (progressStream) { + body.pipe(progressStream).pipe(stream); + } else { + body.pipe(stream); + } + } else if (body) { + // The provided body is a buffer/string and is already fully available in memory - + // For performance it's best to send it as a whole by calling stream.end(body), + // Callers expect a 'sendProgress' event which is best emitted once + // the http request stream has been fully written and all data flushed. + // The use of totalBytes is important over body.length for strings where + // length is char length and not byte length. + stream.once('finish', function() { + stream.emit('sendProgress', { + loaded: totalBytes, + total: totalBytes + }); + }); + stream.end(body); + } else { + // no request body + stream.end(); + } + }, + + /** + * Create the https.Agent or http.Agent according to the request schema. + */ + getAgent: function getAgent(useSSL, agentOptions) { + var http = useSSL ? __nccwpck_require__(95687) : __nccwpck_require__(13685); + if (useSSL) { + if (!AWS.NodeHttpClient.sslAgent) { + AWS.NodeHttpClient.sslAgent = new http.Agent(AWS.util.merge({ + rejectUnauthorized: process.env.NODE_TLS_REJECT_UNAUTHORIZED === '0' ? 
false : true + }, agentOptions || {})); + AWS.NodeHttpClient.sslAgent.setMaxListeners(0); + + // delegate maxSockets to globalAgent, set a default limit of 50 if current value is Infinity. + // Users can bypass this default by supplying their own Agent as part of SDK configuration. + Object.defineProperty(AWS.NodeHttpClient.sslAgent, 'maxSockets', { + enumerable: true, + get: function() { + var defaultMaxSockets = 50; + var globalAgent = http.globalAgent; + if (globalAgent && globalAgent.maxSockets !== Infinity && typeof globalAgent.maxSockets === 'number') { + return globalAgent.maxSockets; + } + return defaultMaxSockets; + } + }); + } + return AWS.NodeHttpClient.sslAgent; + } else { + if (!AWS.NodeHttpClient.agent) { + AWS.NodeHttpClient.agent = new http.Agent(agentOptions); + } + return AWS.NodeHttpClient.agent; + } + }, + + progressStream: function progressStream(stream, totalBytes) { + if (typeof TransformStream === 'undefined') { + // for node 0.8 there is no streaming progress + return; + } + var loadedBytes = 0; + var reporter = new TransformStream(); + reporter._transform = function(chunk, encoding, callback) { + if (chunk) { + loadedBytes += chunk.length; + stream.emit('sendProgress', { + loaded: loadedBytes, + total: totalBytes + }); + } + callback(null, chunk); + }; + return reporter; + }, + + emitter: null +}); + +/** + * @!ignore + */ + +/** + * @api private + */ +AWS.HttpClient.prototype = AWS.NodeHttpClient.prototype; + +/** + * @api private + */ +AWS.HttpClient.streamsApiVersion = ReadableStream ? 2 : 1; + + +/***/ }), + +/***/ 47495: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = __nccwpck_require__(77985); + +function JsonBuilder() { } + +JsonBuilder.prototype.build = function(value, shape) { + return JSON.stringify(translate(value, shape)); +}; + +function translate(value, shape) { + if (!shape || value === undefined || value === null) return undefined; + + switch (shape.type) { + case 'structure': return translateStructure(value, shape); + case 'map': return translateMap(value, shape); + case 'list': return translateList(value, shape); + default: return translateScalar(value, shape); + } +} + +function translateStructure(structure, shape) { + if (shape.isDocument) { + return structure; + } + var struct = {}; + util.each(structure, function(name, value) { + var memberShape = shape.members[name]; + if (memberShape) { + if (memberShape.location !== 'body') return; + var locationName = memberShape.isLocationName ? 
memberShape.name : name; + var result = translate(value, memberShape); + if (result !== undefined) struct[locationName] = result; + } + }); + return struct; +} + +function translateList(list, shape) { + var out = []; + util.arrayEach(list, function(value) { + var result = translate(value, shape.member); + if (result !== undefined) out.push(result); + }); + return out; +} + +function translateMap(map, shape) { + var out = {}; + util.each(map, function(key, value) { + var result = translate(value, shape.value); + if (result !== undefined) out[key] = result; + }); + return out; +} + +function translateScalar(value, shape) { + return shape.toWireFormat(value); +} + +/** + * @api private + */ +module.exports = JsonBuilder; + + +/***/ }), + +/***/ 5474: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = __nccwpck_require__(77985); + +function JsonParser() { } + +JsonParser.prototype.parse = function(value, shape) { + return translate(JSON.parse(value), shape); +}; + +function translate(value, shape) { + if (!shape || value === undefined) return undefined; + + switch (shape.type) { + case 'structure': return translateStructure(value, shape); + case 'map': return translateMap(value, shape); + case 'list': return translateList(value, shape); + default: return translateScalar(value, shape); + } +} + +function translateStructure(structure, shape) { + if (structure == null) return undefined; + if (shape.isDocument) return structure; + + var struct = {}; + var shapeMembers = shape.members; + var isAwsQueryCompatible = shape.api && shape.api.awsQueryCompatible; + util.each(shapeMembers, function(name, memberShape) { + var locationName = memberShape.isLocationName ? memberShape.name : name; + if (Object.prototype.hasOwnProperty.call(structure, locationName)) { + var value = structure[locationName]; + var result = translate(value, memberShape); + if (result !== undefined) struct[name] = result; + } else if (isAwsQueryCompatible && memberShape.defaultValue) { + if (memberShape.type === 'list') { + struct[name] = typeof memberShape.defaultValue === 'function' ? 
memberShape.defaultValue() : memberShape.defaultValue;
+      }
+    }
+  });
+  return struct;
+}
+
+function translateList(list, shape) {
+  if (list == null) return undefined;
+
+  var out = [];
+  util.arrayEach(list, function(value) {
+    var result = translate(value, shape.member);
+    if (result === undefined) out.push(null);
+    else out.push(result);
+  });
+  return out;
+}
+
+function translateMap(map, shape) {
+  if (map == null) return undefined;
+
+  var out = {};
+  util.each(map, function(key, value) {
+    var result = translate(value, shape.value);
+    if (result === undefined) out[key] = null;
+    else out[key] = result;
+  });
+  return out;
+}
+
+function translateScalar(value, shape) {
+  return shape.toType(value);
+}
+
+/**
+ * @api private
+ */
+module.exports = JsonParser;
+
+
+/***/ }),
+
+/***/ 93985:
+/***/ ((module) => {
+
+var warning = [
+  'We are formalizing our plans to enter AWS SDK for JavaScript (v2) into maintenance mode in 2023.\n',
+  'Please migrate your code to use AWS SDK for JavaScript (v3).',
+  'For more information, check the migration guide at https://a.co/7PzMCcy'
+].join('\n');
+
+module.exports = {
+  suppress: false
+};
+
+/**
+ * To suppress this message:
+ * @example
+ * require('aws-sdk/lib/maintenance_mode_message').suppress = true;
+ */
+function emitWarning() {
+  if (typeof process === 'undefined')
+    return;
+
+  // Skip maintenance mode message in Lambda environments
+  if (
+    typeof process.env === 'object' &&
+    typeof process.env.AWS_EXECUTION_ENV !== 'undefined' &&
+    process.env.AWS_EXECUTION_ENV.indexOf('AWS_Lambda_') === 0
+  ) {
+    return;
+  }
+
+  if (
+    typeof process.env === 'object' &&
+    typeof process.env.AWS_SDK_JS_SUPPRESS_MAINTENANCE_MODE_MESSAGE !== 'undefined'
+  ) {
+    return;
+  }
+
+  if (typeof process.emitWarning === 'function') {
+    process.emitWarning(warning, {
+      type: 'NOTE'
+    });
+  }
+}
+
+setTimeout(function () {
+  if (!module.exports.suppress) {
+    emitWarning();
+  }
+}, 0);
+
+
+/***/ }),
+
+/***/ 25768:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+__nccwpck_require__(1556);
+var inherit = AWS.util.inherit;
+var getMetadataServiceEndpoint = __nccwpck_require__(608);
+var URL = (__nccwpck_require__(57310).URL);
+
+/**
+ * Represents a metadata service available on EC2 instances. Using the
+ * {request} method, you can receive metadata about any available resource
+ * on the metadata service.
+ *
+ * You can disable the use of the IMDS by setting the AWS_EC2_METADATA_DISABLED
+ * environment variable to a truthy value.
+ *
+ * @!attribute [r] httpOptions
+ *   @return [map] a map of options to pass to the underlying HTTP request:
+ *
+ *     * **timeout** (Number) — a timeout value in milliseconds to wait
+ *       before aborting the connection. Set to 0 for no timeout.
+ *
+ * @!macro nobrowser
+ */
+AWS.MetadataService = inherit({
+  /**
+   * @return [String] the endpoint of the instance metadata service
+   */
+  endpoint: getMetadataServiceEndpoint(),
+
+  /**
+   * @!ignore
+   */
+
+  /**
+   * Default HTTP options. By default, the metadata service is set to not
+   * timeout on long requests. This means that on non-EC2 machines, this
+   * request will never return. If you are calling this operation from an
+   * environment that may not always run on EC2, set a `timeout` value so
+   * the SDK will abort the request after a given number of milliseconds.
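+   *
+   * @example Failing fast when not on EC2 (an illustrative sketch; the
+   *   one-second timeout is an arbitrary choice)
+   *   var meta = new AWS.MetadataService({httpOptions: {timeout: 1000}});
+   *   meta.request('/latest/meta-data/instance-id', function(err, data) {
+   *     if (err) console.log('IMDS unreachable; probably not on EC2');
+   *     else console.log('instance id:', data);
+   *   });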
+ */ + httpOptions: { timeout: 0 }, + + /** + * when enabled, metadata service will not fetch token + */ + disableFetchToken: false, + + /** + * Creates a new MetadataService object with a given set of options. + * + * @option options host [String] the hostname of the instance metadata + * service + * @option options httpOptions [map] a map of options to pass to the + * underlying HTTP request: + * + * * **timeout** (Number) — a timeout value in milliseconds to wait + * before aborting the connection. Set to 0 for no timeout. + * @option options maxRetries [Integer] the maximum number of retries to + * perform for timeout errors + * @option options retryDelayOptions [map] A set of options to configure the + * retry delay on retryable errors. See AWS.Config for details. + * @option options ec2MetadataV1Disabled [boolean] Whether to block IMDS v1 fallback. + * @option options profile [string] A profile to check for IMDSv1 fallback settings. + * @option options filename [string] Optional filename for the config file. + */ + constructor: function MetadataService(options) { + if (options && options.host) { + options.endpoint = 'http://' + options.host; + delete options.host; + } + this.profile = options && options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile; + this.ec2MetadataV1Disabled = !!(options && options.ec2MetadataV1Disabled); + this.filename = options && options.filename; + AWS.util.update(this, options); + }, + + /** + * Sends a request to the instance metadata service for a given resource. + * + * @param path [String] the path of the resource to get + * + * @param options [map] an optional map used to make request + * + * * **method** (String) — HTTP request method + * + * * **headers** (map) — a map of response header keys and their respective values + * + * @callback callback function(err, data) + * Called when a response is available from the service. 
+ * @param err [Error, null] if an error occurred, this value will be set + * @param data [String, null] if the request was successful, the body of + * the response + */ + request: function request(path, options, callback) { + if (arguments.length === 2) { + callback = options; + options = {}; + } + + if (process.env[AWS.util.imdsDisabledEnv]) { + callback(new Error('EC2 Instance Metadata Service access disabled')); + return; + } + + path = path || '/'; + + // Verify that host is a valid URL + if (URL) { new URL(this.endpoint); } + + var httpRequest = new AWS.HttpRequest(this.endpoint + path); + httpRequest.method = options.method || 'GET'; + if (options.headers) { + httpRequest.headers = options.headers; + } + AWS.util.handleRequestWithRetries(httpRequest, this, callback); + }, + + /** + * @api private + */ + loadCredentialsCallbacks: [], + + /** + * Fetches metadata token used for getting credentials + * + * @api private + * @callback callback function(err, token) + * Called when token is loaded from the resource + */ + fetchMetadataToken: function fetchMetadataToken(callback) { + var self = this; + var tokenFetchPath = '/latest/api/token'; + self.request( + tokenFetchPath, + { + 'method': 'PUT', + 'headers': { + 'x-aws-ec2-metadata-token-ttl-seconds': '21600' + } + }, + callback + ); + }, + + /** + * Fetches credentials + * + * @api private + * @callback cb function(err, creds) + * Called when credentials are loaded from the resource + */ + fetchCredentials: function fetchCredentials(options, cb) { + var self = this; + var basePath = '/latest/meta-data/iam/security-credentials/'; + + var isImdsV1Fallback = self.disableFetchToken + || !(options && options.headers && options.headers['x-aws-ec2-metadata-token']); + + if (isImdsV1Fallback && !(process.env.AWS_EC2_METADATA_DISABLED)) { + try { + var profiles = AWS.util.getProfilesFromSharedConfig(AWS.util.iniLoader, this.filename); + var profileSettings = profiles[this.profile] || {}; + } catch (e) { + profileSettings = {}; + } + + if (profileSettings.ec2_metadata_v1_disabled && profileSettings.ec2_metadata_v1_disabled !== 'false') { + return cb(AWS.util.error( + new Error('AWS EC2 Metadata v1 fallback has been blocked by AWS config file profile.') + )); + } + + if (self.ec2MetadataV1Disabled) { + return cb(AWS.util.error( + new Error('AWS EC2 Metadata v1 fallback has been blocked by AWS.MetadataService::options.ec2MetadataV1Disabled=true.') + )); + } + + if (process.env.AWS_EC2_METADATA_V1_DISABLED && process.env.AWS_EC2_METADATA_V1_DISABLED !== 'false') { + return cb(AWS.util.error( + new Error('AWS EC2 Metadata v1 fallback has been blocked by process.env.AWS_EC2_METADATA_V1_DISABLED.') + )); + } + } + + self.request(basePath, options, function (err, roleName) { + if (err) { + self.disableFetchToken = !(err.statusCode === 401); + cb(AWS.util.error( + err, + { + message: 'EC2 Metadata roleName request returned error' + } + )); + return; + } + roleName = roleName.split('\n')[0]; // grab first (and only) role + self.request(basePath + roleName, options, function (credErr, credData) { + if (credErr) { + self.disableFetchToken = !(credErr.statusCode === 401); + cb(AWS.util.error( + credErr, + { + message: 'EC2 Metadata creds request returned error' + } + )); + return; + } + try { + var credentials = JSON.parse(credData); + cb(null, credentials); + } catch (parseError) { + cb(parseError); + } + }); + }); + }, + + /** + * Loads a set of credentials stored in the instance metadata service + * + * @api private + * @callback callback function(err, 
credentials) + * Called when credentials are loaded from the resource + * @param err [Error] if an error occurred, this value will be set + * @param credentials [Object] the raw JSON object containing all + * metadata from the credentials resource + */ + loadCredentials: function loadCredentials(callback) { + var self = this; + self.loadCredentialsCallbacks.push(callback); + if (self.loadCredentialsCallbacks.length > 1) { return; } + + function callbacks(err, creds) { + var cb; + while ((cb = self.loadCredentialsCallbacks.shift()) !== undefined) { + cb(err, creds); + } + } + + if (self.disableFetchToken) { + self.fetchCredentials({}, callbacks); + } else { + self.fetchMetadataToken(function(tokenError, token) { + if (tokenError) { + if (tokenError.code === 'TimeoutError') { + self.disableFetchToken = true; + } else if (tokenError.retryable === true) { + callbacks(AWS.util.error( + tokenError, + { + message: 'EC2 Metadata token request returned error' + } + )); + return; + } else if (tokenError.statusCode === 400) { + callbacks(AWS.util.error( + tokenError, + { + message: 'EC2 Metadata token request returned 400' + } + )); + return; + } + } + var options = {}; + if (token) { + options.headers = { + 'x-aws-ec2-metadata-token': token + }; + } + self.fetchCredentials(options, callbacks); + }); + + } + } +}); + +/** + * @api private + */ +module.exports = AWS.MetadataService; + + +/***/ }), + +/***/ 83205: +/***/ ((module) => { + +var getEndpoint = function() { + return { + IPv4: 'http://169.254.169.254', + IPv6: 'http://[fd00:ec2::254]', + }; +}; + +module.exports = getEndpoint; + + +/***/ }), + +/***/ 95578: +/***/ ((module) => { + +var ENV_ENDPOINT_NAME = 'AWS_EC2_METADATA_SERVICE_ENDPOINT'; +var CONFIG_ENDPOINT_NAME = 'ec2_metadata_service_endpoint'; + +var getEndpointConfigOptions = function() { + return { + environmentVariableSelector: function(env) { return env[ENV_ENDPOINT_NAME]; }, + configFileSelector: function(profile) { return profile[CONFIG_ENDPOINT_NAME]; }, + default: undefined, + }; +}; + +module.exports = getEndpointConfigOptions; + + +/***/ }), + +/***/ 37997: +/***/ ((module) => { + +var getEndpointMode = function() { + return { + IPv4: 'IPv4', + IPv6: 'IPv6', + }; +}; + +module.exports = getEndpointMode; + + +/***/ }), + +/***/ 45509: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var EndpointMode = __nccwpck_require__(37997)(); + +var ENV_ENDPOINT_MODE_NAME = 'AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE'; +var CONFIG_ENDPOINT_MODE_NAME = 'ec2_metadata_service_endpoint_mode'; + +var getEndpointModeConfigOptions = function() { + return { + environmentVariableSelector: function(env) { return env[ENV_ENDPOINT_MODE_NAME]; }, + configFileSelector: function(profile) { return profile[CONFIG_ENDPOINT_MODE_NAME]; }, + default: EndpointMode.IPv4, + }; +}; + +module.exports = getEndpointModeConfigOptions; + + +/***/ }), + +/***/ 608: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +var Endpoint = __nccwpck_require__(83205)(); +var EndpointMode = __nccwpck_require__(37997)(); + +var ENDPOINT_CONFIG_OPTIONS = __nccwpck_require__(95578)(); +var ENDPOINT_MODE_CONFIG_OPTIONS = __nccwpck_require__(45509)(); + +var getMetadataServiceEndpoint = function() { + var endpoint = AWS.util.loadConfig(ENDPOINT_CONFIG_OPTIONS); + if (endpoint !== undefined) return endpoint; + + var endpointMode = AWS.util.loadConfig(ENDPOINT_MODE_CONFIG_OPTIONS); + switch (endpointMode) { + case EndpointMode.IPv4: + return 
Endpoint.IPv4; + case EndpointMode.IPv6: + return Endpoint.IPv6; + default: + throw new Error('Unsupported endpoint mode: ' + endpointMode); + } +}; + +module.exports = getMetadataServiceEndpoint; + + +/***/ }), + +/***/ 17657: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var Collection = __nccwpck_require__(71965); +var Operation = __nccwpck_require__(28083); +var Shape = __nccwpck_require__(71349); +var Paginator = __nccwpck_require__(45938); +var ResourceWaiter = __nccwpck_require__(41368); +var metadata = __nccwpck_require__(17752); + +var util = __nccwpck_require__(77985); +var property = util.property; +var memoizedProperty = util.memoizedProperty; + +function Api(api, options) { + var self = this; + api = api || {}; + options = options || {}; + options.api = this; + + api.metadata = api.metadata || {}; + + var serviceIdentifier = options.serviceIdentifier; + delete options.serviceIdentifier; + + property(this, 'isApi', true, false); + property(this, 'apiVersion', api.metadata.apiVersion); + property(this, 'endpointPrefix', api.metadata.endpointPrefix); + property(this, 'signingName', api.metadata.signingName); + property(this, 'globalEndpoint', api.metadata.globalEndpoint); + property(this, 'signatureVersion', api.metadata.signatureVersion); + property(this, 'jsonVersion', api.metadata.jsonVersion); + property(this, 'targetPrefix', api.metadata.targetPrefix); + property(this, 'protocol', api.metadata.protocol); + property(this, 'timestampFormat', api.metadata.timestampFormat); + property(this, 'xmlNamespaceUri', api.metadata.xmlNamespace); + property(this, 'abbreviation', api.metadata.serviceAbbreviation); + property(this, 'fullName', api.metadata.serviceFullName); + property(this, 'serviceId', api.metadata.serviceId); + if (serviceIdentifier && metadata[serviceIdentifier]) { + property(this, 'xmlNoDefaultLists', metadata[serviceIdentifier].xmlNoDefaultLists, false); + } + + memoizedProperty(this, 'className', function() { + var name = api.metadata.serviceAbbreviation || api.metadata.serviceFullName; + if (!name) return null; + + name = name.replace(/^Amazon|AWS\s*|\(.*|\s+|\W+/g, ''); + if (name === 'ElasticLoadBalancing') name = 'ELB'; + return name; + }); + + function addEndpointOperation(name, operation) { + if (operation.endpointoperation === true) { + property(self, 'endpointOperation', util.string.lowerFirst(name)); + } + if (operation.endpointdiscovery && !self.hasRequiredEndpointDiscovery) { + property( + self, + 'hasRequiredEndpointDiscovery', + operation.endpointdiscovery.required === true + ); + } + } + + property(this, 'operations', new Collection(api.operations, options, function(name, operation) { + return new Operation(name, operation, options); + }, util.string.lowerFirst, addEndpointOperation)); + + property(this, 'shapes', new Collection(api.shapes, options, function(name, shape) { + return Shape.create(shape, options); + })); + + property(this, 'paginators', new Collection(api.paginators, options, function(name, paginator) { + return new Paginator(name, paginator, options); + })); + + property(this, 'waiters', new Collection(api.waiters, options, function(name, waiter) { + return new ResourceWaiter(name, waiter, options); + }, util.string.lowerFirst)); + + if (options.documentation) { + property(this, 'documentation', api.documentation); + property(this, 'documentationUrl', api.documentationUrl); + } + property(this, 'awsQueryCompatible', api.metadata.awsQueryCompatible); +} + +/** + * @api private + */ +module.exports = Api; + + +/***/ 
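+// Usage sketch (illustrative, not part of the original bundle): Api is
+// normally constructed by AWS.Service from a service's model JSON, e.g.
+//
+//   var Api = __nccwpck_require__(17657);
+//   var api = new Api({metadata: {apiVersion: '2012-08-10', protocol: 'json'}});
+//   api.apiVersion; // => '2012-08-10'
+//   api.protocol;   // => 'json'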
}), + +/***/ 71965: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var memoizedProperty = (__nccwpck_require__(77985).memoizedProperty); + +function memoize(name, value, factory, nameTr) { + memoizedProperty(this, nameTr(name), function() { + return factory(name, value); + }); +} + +function Collection(iterable, options, factory, nameTr, callback) { + nameTr = nameTr || String; + var self = this; + + for (var id in iterable) { + if (Object.prototype.hasOwnProperty.call(iterable, id)) { + memoize.call(self, id, iterable[id], factory, nameTr); + if (callback) callback(id, iterable[id]); + } + } +} + +/** + * @api private + */ +module.exports = Collection; + + +/***/ }), + +/***/ 28083: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var Shape = __nccwpck_require__(71349); + +var util = __nccwpck_require__(77985); +var property = util.property; +var memoizedProperty = util.memoizedProperty; + +function Operation(name, operation, options) { + var self = this; + options = options || {}; + + property(this, 'name', operation.name || name); + property(this, 'api', options.api, false); + + operation.http = operation.http || {}; + property(this, 'endpoint', operation.endpoint); + property(this, 'httpMethod', operation.http.method || 'POST'); + property(this, 'httpPath', operation.http.requestUri || '/'); + property(this, 'authtype', operation.authtype || ''); + property( + this, + 'endpointDiscoveryRequired', + operation.endpointdiscovery ? + (operation.endpointdiscovery.required ? 'REQUIRED' : 'OPTIONAL') : + 'NULL' + ); + + // httpChecksum replaces usage of httpChecksumRequired, but some APIs + // (s3control) still uses old trait. + var httpChecksumRequired = operation.httpChecksumRequired + || (operation.httpChecksum && operation.httpChecksum.requestChecksumRequired); + property(this, 'httpChecksumRequired', httpChecksumRequired, false); + + memoizedProperty(this, 'input', function() { + if (!operation.input) { + return new Shape.create({type: 'structure'}, options); + } + return Shape.create(operation.input, options); + }); + + memoizedProperty(this, 'output', function() { + if (!operation.output) { + return new Shape.create({type: 'structure'}, options); + } + return Shape.create(operation.output, options); + }); + + memoizedProperty(this, 'errors', function() { + var list = []; + if (!operation.errors) return null; + + for (var i = 0; i < operation.errors.length; i++) { + list.push(Shape.create(operation.errors[i], options)); + } + + return list; + }); + + memoizedProperty(this, 'paginator', function() { + return options.api.paginators[name]; + }); + + if (options.documentation) { + property(this, 'documentation', operation.documentation); + property(this, 'documentationUrl', operation.documentationUrl); + } + + // idempotentMembers only tracks top-level input shapes + memoizedProperty(this, 'idempotentMembers', function() { + var idempotentMembers = []; + var input = self.input; + var members = input.members; + if (!input.members) { + return idempotentMembers; + } + for (var name in members) { + if (!members.hasOwnProperty(name)) { + continue; + } + if (members[name].isIdempotent === true) { + idempotentMembers.push(name); + } + } + return idempotentMembers; + }); + + memoizedProperty(this, 'hasEventOutput', function() { + var output = self.output; + return hasEventStream(output); + }); +} + +function hasEventStream(topLevelShape) { + var members = topLevelShape.members; + var payload = topLevelShape.payload; + + if (!topLevelShape.members) { + 
return false;
+  }
+
+  if (payload) {
+    var payloadMember = members[payload];
+    return payloadMember.isEventStream;
+  }
+
+  // check if any member is an event stream
+  for (var name in members) {
+    if (!members.hasOwnProperty(name)) {
+      continue;
+    }
+    if (members[name].isEventStream === true) {
+      return true;
+    }
+  }
+  return false;
+}
+
+/**
+ * @api private
+ */
+module.exports = Operation;
+
+
+/***/ }),
+
+/***/ 45938:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var property = (__nccwpck_require__(77985).property);
+
+function Paginator(name, paginator) {
+  property(this, 'inputToken', paginator.input_token);
+  property(this, 'limitKey', paginator.limit_key);
+  property(this, 'moreResults', paginator.more_results);
+  property(this, 'outputToken', paginator.output_token);
+  property(this, 'resultKey', paginator.result_key);
+}
+
+/**
+ * @api private
+ */
+module.exports = Paginator;
+
+
+/***/ }),
+
+/***/ 41368:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var util = __nccwpck_require__(77985);
+var property = util.property;
+
+function ResourceWaiter(name, waiter, options) {
+  options = options || {};
+  property(this, 'name', name);
+  property(this, 'api', options.api, false);
+
+  if (waiter.operation) {
+    property(this, 'operation', util.string.lowerFirst(waiter.operation));
+  }
+
+  var self = this;
+  var keys = [
+    'type',
+    'description',
+    'delay',
+    'maxAttempts',
+    'acceptors'
+  ];
+
+  keys.forEach(function(key) {
+    var value = waiter[key];
+    if (value) {
+      property(self, key, value);
+    }
+  });
+}
+
+/**
+ * @api private
+ */
+module.exports = ResourceWaiter;
+
+
+/***/ }),
+
+/***/ 71349:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var Collection = __nccwpck_require__(71965);
+
+var util = __nccwpck_require__(77985);
+
+function property(obj, name, value) {
+  if (value !== null && value !== undefined) {
+    util.property.apply(this, arguments);
+  }
+}
+
+function memoizedProperty(obj, name) {
+  if (!obj.constructor.prototype[name]) {
+    util.memoizedProperty.apply(this, arguments);
+  }
+}
+
+function Shape(shape, options, memberName) {
+  options = options || {};
+
+  property(this, 'shape', shape.shape);
+  property(this, 'api', options.api, false);
+  property(this, 'type', shape.type);
+  property(this, 'enum', shape.enum);
+  property(this, 'min', shape.min);
+  property(this, 'max', shape.max);
+  property(this, 'pattern', shape.pattern);
+  property(this, 'location', shape.location || this.location || 'body');
+  property(this, 'name', this.name || shape.xmlName || shape.queryName ||
+    shape.locationName || memberName);
+  property(this, 'isStreaming', shape.streaming || this.isStreaming || false);
+  property(this, 'requiresLength', shape.requiresLength, false);
+  property(this, 'isComposite', shape.isComposite || false);
+  property(this, 'isShape', true, false);
+  property(this, 'isQueryName', Boolean(shape.queryName), false);
+  property(this, 'isLocationName', Boolean(shape.locationName), false);
+  property(this, 'isIdempotent', shape.idempotencyToken === true);
+  property(this, 'isJsonValue', shape.jsonvalue === true);
+  property(this, 'isSensitive', shape.sensitive === true || shape.prototype && shape.prototype.sensitive === true);
+  property(this, 'isEventStream', Boolean(shape.eventstream), false);
+  property(this, 'isEvent', Boolean(shape.event), false);
+  property(this, 'isEventPayload', Boolean(shape.eventpayload), false);
+  property(this, 'isEventHeader', Boolean(shape.eventheader), false);
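+  // These boolean traits mirror flags in the service model JSON (for
+  // example `eventstream`, `sensitive`, `idempotencyToken`); protocol
+  // serializers and the event-stream parser branch on them downstream.
+ 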
property(this, 'isTimestampFormatSet', Boolean(shape.timestampFormat) || shape.prototype && shape.prototype.isTimestampFormatSet === true, false); + property(this, 'endpointDiscoveryId', Boolean(shape.endpointdiscoveryid), false); + property(this, 'hostLabel', Boolean(shape.hostLabel), false); + + if (options.documentation) { + property(this, 'documentation', shape.documentation); + property(this, 'documentationUrl', shape.documentationUrl); + } + + if (shape.xmlAttribute) { + property(this, 'isXmlAttribute', shape.xmlAttribute || false); + } + + // type conversion and parsing + property(this, 'defaultValue', null); + this.toWireFormat = function(value) { + if (value === null || value === undefined) return ''; + return value; + }; + this.toType = function(value) { return value; }; +} + +/** + * @api private + */ +Shape.normalizedTypes = { + character: 'string', + double: 'float', + long: 'integer', + short: 'integer', + biginteger: 'integer', + bigdecimal: 'float', + blob: 'binary' +}; + +/** + * @api private + */ +Shape.types = { + 'structure': StructureShape, + 'list': ListShape, + 'map': MapShape, + 'boolean': BooleanShape, + 'timestamp': TimestampShape, + 'float': FloatShape, + 'integer': IntegerShape, + 'string': StringShape, + 'base64': Base64Shape, + 'binary': BinaryShape +}; + +Shape.resolve = function resolve(shape, options) { + if (shape.shape) { + var refShape = options.api.shapes[shape.shape]; + if (!refShape) { + throw new Error('Cannot find shape reference: ' + shape.shape); + } + + return refShape; + } else { + return null; + } +}; + +Shape.create = function create(shape, options, memberName) { + if (shape.isShape) return shape; + + var refShape = Shape.resolve(shape, options); + if (refShape) { + var filteredKeys = Object.keys(shape); + if (!options.documentation) { + filteredKeys = filteredKeys.filter(function(name) { + return !name.match(/documentation/); + }); + } + + // create an inline shape with extra members + var InlineShape = function() { + refShape.constructor.call(this, shape, options, memberName); + }; + InlineShape.prototype = refShape; + return new InlineShape(); + } else { + // set type if not set + if (!shape.type) { + if (shape.members) shape.type = 'structure'; + else if (shape.member) shape.type = 'list'; + else if (shape.key) shape.type = 'map'; + else shape.type = 'string'; + } + + // normalize types + var origType = shape.type; + if (Shape.normalizedTypes[shape.type]) { + shape.type = Shape.normalizedTypes[shape.type]; + } + + if (Shape.types[shape.type]) { + return new Shape.types[shape.type](shape, options, memberName); + } else { + throw new Error('Unrecognized shape type: ' + origType); + } + } +}; + +function CompositeShape(shape) { + Shape.apply(this, arguments); + property(this, 'isComposite', true); + + if (shape.flattened) { + property(this, 'flattened', shape.flattened || false); + } +} + +function StructureShape(shape, options) { + var self = this; + var requiredMap = null, firstInit = !this.isShape; + + CompositeShape.apply(this, arguments); + + if (firstInit) { + property(this, 'defaultValue', function() { return {}; }); + property(this, 'members', {}); + property(this, 'memberNames', []); + property(this, 'required', []); + property(this, 'isRequired', function() { return false; }); + property(this, 'isDocument', Boolean(shape.document)); + } + + if (shape.members) { + property(this, 'members', new Collection(shape.members, options, function(name, member) { + return Shape.create(member, options, name); + })); + memoizedProperty(this, 
'memberNames', function() { + return shape.xmlOrder || Object.keys(shape.members); + }); + + if (shape.event) { + memoizedProperty(this, 'eventPayloadMemberName', function() { + var members = self.members; + var memberNames = self.memberNames; + // iterate over members to find ones that are event payloads + for (var i = 0, iLen = memberNames.length; i < iLen; i++) { + if (members[memberNames[i]].isEventPayload) { + return memberNames[i]; + } + } + }); + + memoizedProperty(this, 'eventHeaderMemberNames', function() { + var members = self.members; + var memberNames = self.memberNames; + var eventHeaderMemberNames = []; + // iterate over members to find ones that are event headers + for (var i = 0, iLen = memberNames.length; i < iLen; i++) { + if (members[memberNames[i]].isEventHeader) { + eventHeaderMemberNames.push(memberNames[i]); + } + } + return eventHeaderMemberNames; + }); + } + } + + if (shape.required) { + property(this, 'required', shape.required); + property(this, 'isRequired', function(name) { + if (!requiredMap) { + requiredMap = {}; + for (var i = 0; i < shape.required.length; i++) { + requiredMap[shape.required[i]] = true; + } + } + + return requiredMap[name]; + }, false, true); + } + + property(this, 'resultWrapper', shape.resultWrapper || null); + + if (shape.payload) { + property(this, 'payload', shape.payload); + } + + if (typeof shape.xmlNamespace === 'string') { + property(this, 'xmlNamespaceUri', shape.xmlNamespace); + } else if (typeof shape.xmlNamespace === 'object') { + property(this, 'xmlNamespacePrefix', shape.xmlNamespace.prefix); + property(this, 'xmlNamespaceUri', shape.xmlNamespace.uri); + } +} + +function ListShape(shape, options) { + var self = this, firstInit = !this.isShape; + CompositeShape.apply(this, arguments); + + if (firstInit) { + property(this, 'defaultValue', function() { return []; }); + } + + if (shape.member) { + memoizedProperty(this, 'member', function() { + return Shape.create(shape.member, options); + }); + } + + if (this.flattened) { + var oldName = this.name; + memoizedProperty(this, 'name', function() { + return self.member.name || oldName; + }); + } +} + +function MapShape(shape, options) { + var firstInit = !this.isShape; + CompositeShape.apply(this, arguments); + + if (firstInit) { + property(this, 'defaultValue', function() { return {}; }); + property(this, 'key', Shape.create({type: 'string'}, options)); + property(this, 'value', Shape.create({type: 'string'}, options)); + } + + if (shape.key) { + memoizedProperty(this, 'key', function() { + return Shape.create(shape.key, options); + }); + } + if (shape.value) { + memoizedProperty(this, 'value', function() { + return Shape.create(shape.value, options); + }); + } +} + +function TimestampShape(shape) { + var self = this; + Shape.apply(this, arguments); + + if (shape.timestampFormat) { + property(this, 'timestampFormat', shape.timestampFormat); + } else if (self.isTimestampFormatSet && this.timestampFormat) { + property(this, 'timestampFormat', this.timestampFormat); + } else if (this.location === 'header') { + property(this, 'timestampFormat', 'rfc822'); + } else if (this.location === 'querystring') { + property(this, 'timestampFormat', 'iso8601'); + } else if (this.api) { + switch (this.api.protocol) { + case 'json': + case 'rest-json': + property(this, 'timestampFormat', 'unixTimestamp'); + break; + case 'rest-xml': + case 'query': + case 'ec2': + property(this, 'timestampFormat', 'iso8601'); + break; + } + } + + this.toType = function(value) { + if (value === null || value === 
undefined) return null; + if (typeof value.toUTCString === 'function') return value; + return typeof value === 'string' || typeof value === 'number' ? + util.date.parseTimestamp(value) : null; + }; + + this.toWireFormat = function(value) { + return util.date.format(value, self.timestampFormat); + }; +} + +function StringShape() { + Shape.apply(this, arguments); + + var nullLessProtocols = ['rest-xml', 'query', 'ec2']; + this.toType = function(value) { + value = this.api && nullLessProtocols.indexOf(this.api.protocol) > -1 ? + value || '' : value; + if (this.isJsonValue) { + return JSON.parse(value); + } + + return value && typeof value.toString === 'function' ? + value.toString() : value; + }; + + this.toWireFormat = function(value) { + return this.isJsonValue ? JSON.stringify(value) : value; + }; +} + +function FloatShape() { + Shape.apply(this, arguments); + + this.toType = function(value) { + if (value === null || value === undefined) return null; + return parseFloat(value); + }; + this.toWireFormat = this.toType; +} + +function IntegerShape() { + Shape.apply(this, arguments); + + this.toType = function(value) { + if (value === null || value === undefined) return null; + return parseInt(value, 10); + }; + this.toWireFormat = this.toType; +} + +function BinaryShape() { + Shape.apply(this, arguments); + this.toType = function(value) { + var buf = util.base64.decode(value); + if (this.isSensitive && util.isNode() && typeof util.Buffer.alloc === 'function') { + /* Node.js can create a Buffer that is not isolated. + * i.e. buf.byteLength !== buf.buffer.byteLength + * This means that the sensitive data is accessible to anyone with access to buf.buffer. + * If this is the node shared Buffer, then other code within this process _could_ find this secret. + * Copy sensitive data to an isolated Buffer and zero the sensitive data. + * While this is safe to do here, copying this code somewhere else may produce unexpected results. + */ + var secureBuf = util.Buffer.alloc(buf.length, buf); + buf.fill(0); + buf = secureBuf; + } + return buf; + }; + this.toWireFormat = util.base64.encode; +} + +function Base64Shape() { + BinaryShape.apply(this, arguments); +} + +function BooleanShape() { + Shape.apply(this, arguments); + + this.toType = function(value) { + if (typeof value === 'boolean') return value; + if (value === null || value === undefined) return null; + return value === 'true'; + }; +} + +/** + * @api private + */ +Shape.shapes = { + StructureShape: StructureShape, + ListShape: ListShape, + MapShape: MapShape, + StringShape: StringShape, + BooleanShape: BooleanShape, + Base64Shape: Base64Shape +}; + +/** + * @api private + */ +module.exports = Shape; + + +/***/ }), + +/***/ 73639: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = __nccwpck_require__(77985); + +var region_utils = __nccwpck_require__(99517); +var isFipsRegion = region_utils.isFipsRegion; +var getRealRegion = region_utils.getRealRegion; + +util.isBrowser = function() { return false; }; +util.isNode = function() { return true; }; + +// node.js specific modules +util.crypto.lib = __nccwpck_require__(6113); +util.Buffer = (__nccwpck_require__(14300).Buffer); +util.domain = __nccwpck_require__(13639); +util.stream = __nccwpck_require__(12781); +util.url = __nccwpck_require__(57310); +util.querystring = __nccwpck_require__(63477); +util.environment = 'nodejs'; +util.createEventStream = util.stream.Readable ? 
+ (__nccwpck_require__(69643).createEventStream) : (__nccwpck_require__(63727).createEventStream); +util.realClock = __nccwpck_require__(81370); +util.clientSideMonitoring = { + Publisher: (__nccwpck_require__(66807).Publisher), + configProvider: __nccwpck_require__(91822), +}; +util.iniLoader = (__nccwpck_require__(29697)/* .iniLoader */ .b); +util.getSystemErrorName = (__nccwpck_require__(73837).getSystemErrorName); + +util.loadConfig = function(options) { + var envValue = options.environmentVariableSelector(process.env); + if (envValue !== undefined) { + return envValue; + } + + var configFile = {}; + try { + configFile = util.iniLoader ? util.iniLoader.loadFrom({ + isConfig: true, + filename: process.env[util.sharedConfigFileEnv] + }) : {}; + } catch (e) {} + var sharedFileConfig = configFile[ + process.env.AWS_PROFILE || util.defaultProfile + ] || {}; + var configValue = options.configFileSelector(sharedFileConfig); + if (configValue !== undefined) { + return configValue; + } + + if (typeof options.default === 'function') { + return options.default(); + } + return options.default; +}; + +var AWS; + +/** + * @api private + */ +module.exports = AWS = __nccwpck_require__(28437); + +__nccwpck_require__(53819); +__nccwpck_require__(36965); +__nccwpck_require__(77360); +__nccwpck_require__(57083); +__nccwpck_require__(74998); +__nccwpck_require__(3498); +__nccwpck_require__(15037); +__nccwpck_require__(80371); + +// Load the xml2js XML parser +AWS.XML.Parser = __nccwpck_require__(96752); + +// Load Node HTTP client +__nccwpck_require__(2310); + +__nccwpck_require__(95417); + +// Load custom credential providers +__nccwpck_require__(11017); +__nccwpck_require__(73379); +__nccwpck_require__(88764); +__nccwpck_require__(10645); +__nccwpck_require__(57714); +__nccwpck_require__(27454); +__nccwpck_require__(13754); +__nccwpck_require__(80371); +__nccwpck_require__(68335); + +// Setup default providers for credentials chain +// If this changes, please update documentation for +// AWS.CredentialProviderChain.defaultProviders in +// credentials/credential_provider_chain.js +AWS.CredentialProviderChain.defaultProviders = [ + function () { return new AWS.EnvironmentCredentials('AWS'); }, + function () { return new AWS.EnvironmentCredentials('AMAZON'); }, + function () { return new AWS.SsoCredentials(); }, + function () { return new AWS.SharedIniFileCredentials(); }, + function () { return new AWS.ECSCredentials(); }, + function () { return new AWS.ProcessCredentials(); }, + function () { return new AWS.TokenFileWebIdentityCredentials(); }, + function () { return new AWS.EC2MetadataCredentials(); } +]; + +// Load custom token providers +__nccwpck_require__(82647); +__nccwpck_require__(50126); +__nccwpck_require__(90327); + +// Setup default providers for token chain +// If this changes, please update documentation for +// AWS.TokenProviderChain.defaultProviders in +// token/token_provider_chain.js +AWS.TokenProviderChain.defaultProviders = [ + function () { return new AWS.SSOTokenProvider(); }, +]; + +var getRegion = function() { + var env = process.env; + var region = env.AWS_REGION || env.AMAZON_REGION; + if (env[AWS.util.configOptInEnv]) { + var toCheck = [ + {filename: env[AWS.util.sharedCredentialsFileEnv]}, + {isConfig: true, filename: env[AWS.util.sharedConfigFileEnv]} + ]; + var iniLoader = AWS.util.iniLoader; + while (!region && toCheck.length) { + var configFile = {}; + var fileInfo = toCheck.shift(); + try { + configFile = iniLoader.loadFrom(fileInfo); + } catch (err) { + if 
(fileInfo.isConfig) throw err; + } + var profile = configFile[env.AWS_PROFILE || AWS.util.defaultProfile]; + region = profile && profile.region; + } + } + return region; +}; + +var getBooleanValue = function(value) { + return value === 'true' ? true: value === 'false' ? false: undefined; +}; + +var USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: function(env) { + return getBooleanValue(env['AWS_USE_FIPS_ENDPOINT']); + }, + configFileSelector: function(profile) { + return getBooleanValue(profile['use_fips_endpoint']); + }, + default: false, +}; + +var USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: function(env) { + return getBooleanValue(env['AWS_USE_DUALSTACK_ENDPOINT']); + }, + configFileSelector: function(profile) { + return getBooleanValue(profile['use_dualstack_endpoint']); + }, + default: false, +}; + +// Update configuration keys +AWS.util.update(AWS.Config.prototype.keys, { + credentials: function () { + var credentials = null; + new AWS.CredentialProviderChain([ + function () { return new AWS.EnvironmentCredentials('AWS'); }, + function () { return new AWS.EnvironmentCredentials('AMAZON'); }, + function () { return new AWS.SharedIniFileCredentials({ disableAssumeRole: true }); } + ]).resolve(function(err, creds) { + if (!err) credentials = creds; + }); + return credentials; + }, + credentialProvider: function() { + return new AWS.CredentialProviderChain(); + }, + logger: function () { + return process.env.AWSJS_DEBUG ? console : null; + }, + region: function() { + var region = getRegion(); + return region ? getRealRegion(region): undefined; + }, + tokenProvider: function() { + return new AWS.TokenProviderChain(); + }, + useFipsEndpoint: function() { + var region = getRegion(); + return isFipsRegion(region) + ? true + : util.loadConfig(USE_FIPS_ENDPOINT_CONFIG_OPTIONS); + }, + useDualstackEndpoint: function() { + return util.loadConfig(USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS); + } +}); + +// Reset configuration +AWS.config = new AWS.Config(); + + +/***/ }), + +/***/ 99127: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +/** + * @api private + */ +AWS.ParamValidator = AWS.util.inherit({ + /** + * Create a new validator object. + * + * @param validation [Boolean|map] whether input parameters should be + * validated against the operation description before sending the + * request. Pass a map to enable any of the following specific + * validation features: + * + * * **min** [Boolean] — Validates that a value meets the min + * constraint. This is enabled by default when paramValidation is set + * to `true`. + * * **max** [Boolean] — Validates that a value meets the max + * constraint. + * * **pattern** [Boolean] — Validates that a string value matches a + * regular expression. + * * **enum** [Boolean] — Validates that a string value matches one + * of the allowable enum values. 
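+   *
+   * For example (hypothetical `operation` and `params`, shown only for
+   * illustration):
+   *
+   *   var validator = new AWS.ParamValidator({min: true, enum: true});
+   *   validator.validate(operation.input, params); // throws on invalid input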
+ */ + constructor: function ParamValidator(validation) { + if (validation === true || validation === undefined) { + validation = {'min': true}; + } + this.validation = validation; + }, + + validate: function validate(shape, params, context) { + this.errors = []; + this.validateMember(shape, params || {}, context || 'params'); + + if (this.errors.length > 1) { + var msg = this.errors.join('\n* '); + msg = 'There were ' + this.errors.length + + ' validation errors:\n* ' + msg; + throw AWS.util.error(new Error(msg), + {code: 'MultipleValidationErrors', errors: this.errors}); + } else if (this.errors.length === 1) { + throw this.errors[0]; + } else { + return true; + } + }, + + fail: function fail(code, message) { + this.errors.push(AWS.util.error(new Error(message), {code: code})); + }, + + validateStructure: function validateStructure(shape, params, context) { + if (shape.isDocument) return true; + + this.validateType(params, context, ['object'], 'structure'); + var paramName; + for (var i = 0; shape.required && i < shape.required.length; i++) { + paramName = shape.required[i]; + var value = params[paramName]; + if (value === undefined || value === null) { + this.fail('MissingRequiredParameter', + 'Missing required key \'' + paramName + '\' in ' + context); + } + } + + // validate hash members + for (paramName in params) { + if (!Object.prototype.hasOwnProperty.call(params, paramName)) continue; + + var paramValue = params[paramName], + memberShape = shape.members[paramName]; + + if (memberShape !== undefined) { + var memberContext = [context, paramName].join('.'); + this.validateMember(memberShape, paramValue, memberContext); + } else if (paramValue !== undefined && paramValue !== null) { + this.fail('UnexpectedParameter', + 'Unexpected key \'' + paramName + '\' found in ' + context); + } + } + + return true; + }, + + validateMember: function validateMember(shape, param, context) { + switch (shape.type) { + case 'structure': + return this.validateStructure(shape, param, context); + case 'list': + return this.validateList(shape, param, context); + case 'map': + return this.validateMap(shape, param, context); + default: + return this.validateScalar(shape, param, context); + } + }, + + validateList: function validateList(shape, params, context) { + if (this.validateType(params, context, [Array])) { + this.validateRange(shape, params.length, context, 'list member count'); + // validate array members + for (var i = 0; i < params.length; i++) { + this.validateMember(shape.member, params[i], context + '[' + i + ']'); + } + } + }, + + validateMap: function validateMap(shape, params, context) { + if (this.validateType(params, context, ['object'], 'map')) { + // Build up a count of map members to validate range traits. 
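+      //
+      // For illustration only (hypothetical shape and params): given a map
+      // shape with a {min: 1} trait and params {a: '1', b: '2'}, each key and
+      // value is validated as a member and mapCount ends up as 2, which
+      // validateRange below checks against the shape's min/max traits.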
+      var mapCount = 0;
+      for (var param in params) {
+        if (!Object.prototype.hasOwnProperty.call(params, param)) continue;
+        // Validate any map key trait constraints
+        this.validateMember(shape.key, param,
+          context + '[key=\'' + param + '\']');
+        this.validateMember(shape.value, params[param],
+          context + '[\'' + param + '\']');
+        mapCount++;
+      }
+      this.validateRange(shape, mapCount, context, 'map member count');
+    }
+  },
+
+  validateScalar: function validateScalar(shape, value, context) {
+    switch (shape.type) {
+      case null:
+      case undefined:
+      case 'string':
+        return this.validateString(shape, value, context);
+      case 'base64':
+      case 'binary':
+        return this.validatePayload(value, context);
+      case 'integer':
+      case 'float':
+        return this.validateNumber(shape, value, context);
+      case 'boolean':
+        return this.validateType(value, context, ['boolean']);
+      case 'timestamp':
+        return this.validateType(value, context, [Date,
+          /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$/, 'number'],
+          'Date object, ISO-8601 string, or a UNIX timestamp');
+      default:
+        return this.fail('UnknownType', 'Unhandled type ' +
+          shape.type + ' for ' + context);
+    }
+  },
+
+  validateString: function validateString(shape, value, context) {
+    var validTypes = ['string'];
+    if (shape.isJsonValue) {
+      validTypes = validTypes.concat(['number', 'object', 'boolean']);
+    }
+    if (value !== null && this.validateType(value, context, validTypes)) {
+      this.validateEnum(shape, value, context);
+      this.validateRange(shape, value.length, context, 'string length');
+      this.validatePattern(shape, value, context);
+      this.validateUri(shape, value, context);
+    }
+  },
+
+  validateUri: function validateUri(shape, value, context) {
+    if (shape['location'] === 'uri') {
+      if (value.length === 0) {
+        this.fail('UriParameterError', 'Expected uri parameter to have length >= 1,' +
+          ' but found "' + value + '" for ' + context);
+      }
+    }
+  },
+
+  validatePattern: function validatePattern(shape, value, context) {
+    if (this.validation['pattern'] && shape['pattern'] !== undefined) {
+      if (!(new RegExp(shape['pattern'])).test(value)) {
+        this.fail('PatternMatchError', 'Provided value "' + value + '" ' +
+          'does not match regex pattern /' + shape['pattern'] + '/ for ' +
+          context);
+      }
+    }
+  },
+
+  validateRange: function validateRange(shape, value, context, descriptor) {
+    if (this.validation['min']) {
+      if (shape['min'] !== undefined && value < shape['min']) {
+        this.fail('MinRangeError', 'Expected ' + descriptor + ' >= ' +
+          shape['min'] + ', but found ' + value + ' for ' + context);
+      }
+    }
+    if (this.validation['max']) {
+      if (shape['max'] !== undefined && value > shape['max']) {
+        this.fail('MaxRangeError', 'Expected ' + descriptor + ' <= ' +
+          shape['max'] + ', but found ' + value + ' for ' + context);
+      }
+    }
+  },
+
+  validateEnum: function validateEnum(shape, value, context) {
+    if (this.validation['enum'] && shape['enum'] !== undefined) {
+      // Fail if the string value is not present in the enum list
+      if (shape['enum'].indexOf(value) === -1) {
+        this.fail('EnumError', 'Found string value of ' + value + ', but ' +
+          'expected ' + shape['enum'].join('|') + ' for ' + context);
+      }
+    }
+  },
+
+  validateType: function validateType(value, context, acceptedTypes, type) {
+    // We will not log an error for null or undefined, but we will return
+    // false so that callers know that the expected type was not strictly met.
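+    //
+    // Accepted types may be type strings, constructors, or RegExps. For
+    // example (hypothetical calls, not from this file):
+    //   this.validateType(5, 'params.n', ['number']);      // => true
+    //   this.validateType(new Date(), 'params.t', [Date]); // => true
+    //   this.validateType('x', 'params.n', ['number']);    // pushes an
+    //     // InvalidParameterType error via fail() and returns false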
+    if (value === null || value === undefined) return false;
+
+    var foundInvalidType = false;
+    for (var i = 0; i < acceptedTypes.length; i++) {
+      if (typeof acceptedTypes[i] === 'string') {
+        if (typeof value === acceptedTypes[i]) return true;
+      } else if (acceptedTypes[i] instanceof RegExp) {
+        if ((value || '').toString().match(acceptedTypes[i])) return true;
+      } else {
+        if (value instanceof acceptedTypes[i]) return true;
+        if (AWS.util.isType(value, acceptedTypes[i])) return true;
+        if (!type && !foundInvalidType) acceptedTypes = acceptedTypes.slice();
+        acceptedTypes[i] = AWS.util.typeName(acceptedTypes[i]);
+      }
+      foundInvalidType = true;
+    }
+
+    var acceptedType = type;
+    if (!acceptedType) {
+      acceptedType = acceptedTypes.join(', ').replace(/,([^,]+)$/, ', or$1');
+    }
+
+    var vowel = acceptedType.match(/^[aeiou]/i) ? 'n' : '';
+    this.fail('InvalidParameterType', 'Expected ' + context + ' to be a' +
+      vowel + ' ' + acceptedType);
+    return false;
+  },
+
+  validateNumber: function validateNumber(shape, value, context) {
+    if (value === null || value === undefined) return;
+    if (typeof value === 'string') {
+      var castedValue = parseFloat(value);
+      if (castedValue.toString() === value) value = castedValue;
+    }
+    if (this.validateType(value, context, ['number'])) {
+      this.validateRange(shape, value, context, 'numeric value');
+    }
+  },
+
+  validatePayload: function validatePayload(value, context) {
+    if (value === null || value === undefined) return;
+    if (typeof value === 'string') return;
+    if (value && typeof value.byteLength === 'number') return; // typed arrays
+    if (AWS.util.isNode()) { // special check for buffer/stream in Node.js
+      var Stream = AWS.util.stream.Stream;
+      if (AWS.util.Buffer.isBuffer(value) || value instanceof Stream) return;
+    } else {
+      // `typeof Blob` always yields a string, so compare against 'undefined'
+      if (typeof Blob !== 'undefined' && value instanceof Blob) return;
+    }
+
+    var types = ['Buffer', 'Stream', 'File', 'Blob', 'ArrayBuffer', 'DataView'];
+    if (value) {
+      for (var i = 0; i < types.length; i++) {
+        if (AWS.util.isType(value, types[i])) return;
+        if (AWS.util.typeName(value.constructor) === types[i]) return;
+      }
+    }
+
+    this.fail('InvalidParameterType', 'Expected ' + context + ' to be a ' +
+      'string, Buffer, Stream, Blob, or typed array object');
+  }
+});
+
+
+/***/ }),
+
+/***/ 44086:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var rest = AWS.Protocol.Rest;
+
+/**
+ * A presigner object can be used to generate presigned urls for the Polly service.
+ */
+AWS.Polly.Presigner = AWS.util.inherit({
+  /**
+   * Creates a presigner object with a set of configuration options.
+   *
+   * @option options params [map] An optional map of parameters to bind to every
+   *   request sent by this service object.
+   * @option options service [AWS.Polly] An optional pre-configured instance
+   *   of the AWS.Polly service object to use for requests. The object may have
+   *   bound parameters used by the presigner.
+ * @see AWS.Polly.constructor + */ + constructor: function Signer(options) { + options = options || {}; + this.options = options; + this.service = options.service; + this.bindServiceObject(options); + this._operations = {}; + }, + + /** + * @api private + */ + bindServiceObject: function bindServiceObject(options) { + options = options || {}; + if (!this.service) { + this.service = new AWS.Polly(options); + } else { + var config = AWS.util.copy(this.service.config); + this.service = new this.service.constructor.__super__(config); + this.service.config.params = AWS.util.merge(this.service.config.params || {}, options.params); + } + }, + + /** + * @api private + */ + modifyInputMembers: function modifyInputMembers(input) { + // make copies of the input so we don't overwrite the api + // need to be careful to copy anything we access/modify + var modifiedInput = AWS.util.copy(input); + modifiedInput.members = AWS.util.copy(input.members); + AWS.util.each(input.members, function(name, member) { + modifiedInput.members[name] = AWS.util.copy(member); + // update location and locationName + if (!member.location || member.location === 'body') { + modifiedInput.members[name].location = 'querystring'; + modifiedInput.members[name].locationName = name; + } + }); + return modifiedInput; + }, + + /** + * @api private + */ + convertPostToGet: function convertPostToGet(req) { + // convert method + req.httpRequest.method = 'GET'; + + var operation = req.service.api.operations[req.operation]; + // get cached operation input first + var input = this._operations[req.operation]; + if (!input) { + // modify the original input + this._operations[req.operation] = input = this.modifyInputMembers(operation.input); + } + + var uri = rest.generateURI(req.httpRequest.endpoint.path, operation.httpPath, input, req.params); + + req.httpRequest.path = uri; + req.httpRequest.body = ''; + + // don't need these headers on a GET request + delete req.httpRequest.headers['Content-Length']; + delete req.httpRequest.headers['Content-Type']; + }, + + /** + * @overload getSynthesizeSpeechUrl(params = {}, [expires = 3600], [callback]) + * Generate a presigned url for {AWS.Polly.synthesizeSpeech}. + * @note You must ensure that you have static or previously resolved + * credentials if you call this method synchronously (with no callback), + * otherwise it may not properly sign the request. If you cannot guarantee + * this (you are using an asynchronous credential provider, i.e., EC2 + * IAM roles), you should always call this method with an asynchronous + * callback. + * @param params [map] parameters to pass to the operation. See the {AWS.Polly.synthesizeSpeech} + * operation for the expected operation parameters. + * @param expires [Integer] (3600) the number of seconds to expire the pre-signed URL operation in. + * Defaults to 1 hour. + * @return [string] if called synchronously (with no callback), returns the signed URL. + * @return [null] nothing is returned if a callback is provided. + * @callback callback function (err, url) + * If a callback is supplied, it is called when a signed URL has been generated. + * @param err [Error] the error object returned from the presigner. + * @param url [String] the signed URL. 
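+   * @example Generating a presigned URL (illustrative parameters)
+   *   var presigner = new AWS.Polly.Presigner();
+   *   presigner.getSynthesizeSpeechUrl({
+   *     OutputFormat: 'mp3',
+   *     Text: 'Hello world',
+   *     VoiceId: 'Joanna'
+   *   }, 300, function(err, url) {
+   *     if (!err) console.log(url); // URL expires after 300 seconds
+   *   });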
+ * @see AWS.Polly.synthesizeSpeech + */ + getSynthesizeSpeechUrl: function getSynthesizeSpeechUrl(params, expires, callback) { + var self = this; + var request = this.service.makeRequest('synthesizeSpeech', params); + // remove existing build listeners + request.removeAllListeners('build'); + request.on('build', function(req) { + self.convertPostToGet(req); + }); + return request.presign(expires, callback); + } +}); + + +/***/ }), + +/***/ 97969: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = __nccwpck_require__(77985); +var AWS = __nccwpck_require__(28437); + +/** + * Prepend prefix defined by API model to endpoint that's already + * constructed. This feature does not apply to operations using + * endpoint discovery and can be disabled. + * @api private + */ +function populateHostPrefix(request) { + var enabled = request.service.config.hostPrefixEnabled; + if (!enabled) return request; + var operationModel = request.service.api.operations[request.operation]; + //don't marshal host prefix when operation has endpoint discovery traits + if (hasEndpointDiscover(request)) return request; + if (operationModel.endpoint && operationModel.endpoint.hostPrefix) { + var hostPrefixNotation = operationModel.endpoint.hostPrefix; + var hostPrefix = expandHostPrefix(hostPrefixNotation, request.params, operationModel.input); + prependEndpointPrefix(request.httpRequest.endpoint, hostPrefix); + validateHostname(request.httpRequest.endpoint.hostname); + } + return request; +} + +/** + * @api private + */ +function hasEndpointDiscover(request) { + var api = request.service.api; + var operationModel = api.operations[request.operation]; + var isEndpointOperation = api.endpointOperation && (api.endpointOperation === util.string.lowerFirst(operationModel.name)); + return (operationModel.endpointDiscoveryRequired !== 'NULL' || isEndpointOperation === true); +} + +/** + * @api private + */ +function expandHostPrefix(hostPrefixNotation, params, shape) { + util.each(shape.members, function(name, member) { + if (member.hostLabel === true) { + if (typeof params[name] !== 'string' || params[name] === '') { + throw util.error(new Error(), { + message: 'Parameter ' + name + ' should be a non-empty string.', + code: 'InvalidParameter' + }); + } + var regex = new RegExp('\\{' + name + '\\}', 'g'); + hostPrefixNotation = hostPrefixNotation.replace(regex, params[name]); + } + }); + return hostPrefixNotation; +} + +/** + * @api private + */ +function prependEndpointPrefix(endpoint, prefix) { + if (endpoint.host) { + endpoint.host = prefix + endpoint.host; + } + if (endpoint.hostname) { + endpoint.hostname = prefix + endpoint.hostname; + } +} + +/** + * @api private + */ +function validateHostname(hostname) { + var labels = hostname.split('.'); + //Reference: https://tools.ietf.org/html/rfc1123#section-2 + var hostPattern = /^[a-zA-Z0-9]{1}$|^[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9]$/; + util.arrayEach(labels, function(label) { + if (!label.length || label.length < 1 || label.length > 63) { + throw util.error(new Error(), { + code: 'ValidationError', + message: 'Hostname label length should be between 1 to 63 characters, inclusive.' 
+ }); + } + if (!hostPattern.test(label)) { + throw AWS.util.error(new Error(), + {code: 'ValidationError', message: label + ' is not hostname compatible.'}); + } + }); +} + +module.exports = { + populateHostPrefix: populateHostPrefix +}; + + +/***/ }), + +/***/ 30083: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = __nccwpck_require__(77985); +var JsonBuilder = __nccwpck_require__(47495); +var JsonParser = __nccwpck_require__(5474); +var populateHostPrefix = (__nccwpck_require__(97969).populateHostPrefix); + +function buildRequest(req) { + var httpRequest = req.httpRequest; + var api = req.service.api; + var target = api.targetPrefix + '.' + api.operations[req.operation].name; + var version = api.jsonVersion || '1.0'; + var input = api.operations[req.operation].input; + var builder = new JsonBuilder(); + + if (version === 1) version = '1.0'; + + if (api.awsQueryCompatible) { + if (!httpRequest.params) { + httpRequest.params = {}; + } + // because Query protocol does this. + Object.assign(httpRequest.params, req.params); + } + + httpRequest.body = builder.build(req.params || {}, input); + httpRequest.headers['Content-Type'] = 'application/x-amz-json-' + version; + httpRequest.headers['X-Amz-Target'] = target; + + populateHostPrefix(req); +} + +function extractError(resp) { + var error = {}; + var httpResponse = resp.httpResponse; + + error.code = httpResponse.headers['x-amzn-errortype'] || 'UnknownError'; + if (typeof error.code === 'string') { + error.code = error.code.split(':')[0]; + } + + if (httpResponse.body.length > 0) { + try { + var e = JSON.parse(httpResponse.body.toString()); + + var code = e.__type || e.code || e.Code; + if (code) { + error.code = code.split('#').pop(); + } + if (error.code === 'RequestEntityTooLarge') { + error.message = 'Request body must be less than 1 MB'; + } else { + error.message = (e.message || e.Message || null); + } + + // The minimized models do not have error shapes, so + // without expanding the model size, it's not possible + // to validate the response shape (members) or + // check if any are sensitive to logging. + + // Assign the fields as non-enumerable, allowing specific access only. + for (var key in e || {}) { + if (key === 'code' || key === 'message') { + continue; + } + error['[' + key + ']'] = 'See error.' 
+ key + ' for details.';
+        Object.defineProperty(error, key, {
+          value: e[key],
+          enumerable: false,
+          writable: true
+        });
+      }
+    } catch (e) {
+      error.statusCode = httpResponse.statusCode;
+      error.message = httpResponse.statusMessage;
+    }
+  } else {
+    error.statusCode = httpResponse.statusCode;
+    error.message = httpResponse.statusCode.toString();
+  }
+
+  resp.error = util.error(new Error(), error);
+}
+
+function extractData(resp) {
+  var body = resp.httpResponse.body.toString() || '{}';
+  if (resp.request.service.config.convertResponseTypes === false) {
+    resp.data = JSON.parse(body);
+  } else {
+    var operation = resp.request.service.api.operations[resp.request.operation];
+    var shape = operation.output || {};
+    var parser = new JsonParser();
+    resp.data = parser.parse(body, shape);
+  }
+}
+
+/**
+ * @api private
+ */
+module.exports = {
+  buildRequest: buildRequest,
+  extractError: extractError,
+  extractData: extractData
+};
+
+
+/***/ }),
+
+/***/ 90761:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var util = __nccwpck_require__(77985);
+var QueryParamSerializer = __nccwpck_require__(45175);
+var Shape = __nccwpck_require__(71349);
+var populateHostPrefix = (__nccwpck_require__(97969).populateHostPrefix);
+
+function buildRequest(req) {
+  var operation = req.service.api.operations[req.operation];
+  var httpRequest = req.httpRequest;
+  httpRequest.headers['Content-Type'] =
+    'application/x-www-form-urlencoded; charset=utf-8';
+  httpRequest.params = {
+    Version: req.service.api.apiVersion,
+    Action: operation.name
+  };
+
+  // convert the request parameters into a list of query params,
+  // e.g. Deeply.NestedParam.0.Name=value
+  var builder = new QueryParamSerializer();
+  builder.serialize(req.params, operation.input, function(name, value) {
+    httpRequest.params[name] = value;
+  });
+  httpRequest.body = util.queryParamsToString(httpRequest.params);
+
+  populateHostPrefix(req);
+}
+
+function extractError(resp) {
+  var data, body = resp.httpResponse.body.toString();
+  if (body.match('<UnknownOperationException')) {
+    data = {
+      Code: 'UnknownOperation',
+      Message: 'Unknown operation ' + resp.request.operation
+    };
+  } else {
+    try {
+      data = new AWS.XML.Parser().parse(body);
+    } catch (e) {
+      data = {
+        Code: resp.httpResponse.statusCode,
+        Message: resp.httpResponse.statusMessage
+      };
+    }
+  }
+
+  if (data.requestId && !resp.requestId) resp.requestId = data.requestId;
+  if (data.Errors) data = data.Errors;
+  if (data.Error) data = data.Error;
+  if (data.Code) {
+    resp.error = util.error(new Error(), {
+      code: data.Code,
+      message: data.Message
+    });
+  } else {
+    resp.error = util.error(new Error(), {
+      code: resp.httpResponse.statusCode,
+      message: null
+    });
+  }
+}
+
+function extractData(resp) {
+  var req = resp.request;
+  var operation = req.service.api.operations[req.operation];
+  var shape = operation.output || {};
+  var origRules = shape;
+
+  if (origRules.resultWrapper) {
+    var tmp = Shape.create({type: 'structure'});
+    tmp.members[origRules.resultWrapper] = shape;
+    tmp.memberNames = [origRules.resultWrapper];
+    util.property(shape, 'name', shape.resultWrapper);
+    shape = tmp;
+  }
+
+  var parser = new AWS.XML.Parser();
+
+  // parse the request id from the response body if present
+  if (shape && shape.members && !shape.members._XAMZRequestId) {
+    var requestIdShape = Shape.create(
+      {type: 'string'},
+      {api: {protocol: 'query'}},
+      'requestId'
+    );
+    shape.members._XAMZRequestId = requestIdShape;
+  }
+
+  var data = parser.parse(resp.httpResponse.body.toString(), shape);
+  resp.requestId = data._XAMZRequestId || data.requestId;
+
+  if (data._XAMZRequestId) delete data._XAMZRequestId;
+
+  if (origRules.resultWrapper) {
+    if (data[origRules.resultWrapper]) {
+      util.update(data, data[origRules.resultWrapper]);
+      delete data[origRules.resultWrapper];
+    }
+  }
+
+  resp.data = data;
+}
+
+/**
+ * @api private
+ */
+module.exports = {
+  buildRequest: buildRequest,
+  extractError: extractError,
+  extractData: extractData
+};
+
+
+/***/ }),
+
+/***/ 98200:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var util = __nccwpck_require__(77985);
+var populateHostPrefix = (__nccwpck_require__(97969).populateHostPrefix);
+
+function populateMethod(req) {
+  req.httpRequest.method = req.service.api.operations[req.operation].httpMethod;
+}
+
+function generateURI(endpointPath, operationPath, input, params) {
+  var uri = [endpointPath, operationPath].join('/');
+  uri = uri.replace(/\/+/g, '/');
+
+  var queryString = {}, queryStringSet = false;
+  util.each(input.members, function (name, member) {
+    var paramValue = params[name];
+    if (paramValue === null || paramValue === undefined) return;
+    if (member.location === 'uri') {
+      var regex = new RegExp('\\{' + member.name + '(\\+)?\\}');
+      uri = uri.replace(regex, function(_, plus) {
+        var fn = plus ?
util.uriEscapePath : util.uriEscape; + return fn(String(paramValue)); + }); + } else if (member.location === 'querystring') { + queryStringSet = true; + + if (member.type === 'list') { + queryString[member.name] = paramValue.map(function(val) { + return util.uriEscape(member.member.toWireFormat(val).toString()); + }); + } else if (member.type === 'map') { + util.each(paramValue, function(key, value) { + if (Array.isArray(value)) { + queryString[key] = value.map(function(val) { + return util.uriEscape(String(val)); + }); + } else { + queryString[key] = util.uriEscape(String(value)); + } + }); + } else { + queryString[member.name] = util.uriEscape(member.toWireFormat(paramValue).toString()); + } + } + }); + + if (queryStringSet) { + uri += (uri.indexOf('?') >= 0 ? '&' : '?'); + var parts = []; + util.arrayEach(Object.keys(queryString).sort(), function(key) { + if (!Array.isArray(queryString[key])) { + queryString[key] = [queryString[key]]; + } + for (var i = 0; i < queryString[key].length; i++) { + parts.push(util.uriEscape(String(key)) + '=' + queryString[key][i]); + } + }); + uri += parts.join('&'); + } + + return uri; +} + +function populateURI(req) { + var operation = req.service.api.operations[req.operation]; + var input = operation.input; + + var uri = generateURI(req.httpRequest.endpoint.path, operation.httpPath, input, req.params); + req.httpRequest.path = uri; +} + +function populateHeaders(req) { + var operation = req.service.api.operations[req.operation]; + util.each(operation.input.members, function (name, member) { + var value = req.params[name]; + if (value === null || value === undefined) return; + + if (member.location === 'headers' && member.type === 'map') { + util.each(value, function(key, memberValue) { + req.httpRequest.headers[member.name + key] = memberValue; + }); + } else if (member.location === 'header') { + value = member.toWireFormat(value).toString(); + if (member.isJsonValue) { + value = util.base64.encode(value); + } + req.httpRequest.headers[member.name] = value; + } + }); +} + +function buildRequest(req) { + populateMethod(req); + populateURI(req); + populateHeaders(req); + populateHostPrefix(req); +} + +function extractError() { +} + +function extractData(resp) { + var req = resp.request; + var data = {}; + var r = resp.httpResponse; + var operation = req.service.api.operations[req.operation]; + var output = operation.output; + + // normalize headers names to lower-cased keys for matching + var headers = {}; + util.each(r.headers, function (k, v) { + headers[k.toLowerCase()] = v; + }); + + util.each(output.members, function(name, member) { + var header = (member.name || name).toLowerCase(); + if (member.location === 'headers' && member.type === 'map') { + data[name] = {}; + var location = member.isLocationName ? member.name : ''; + var pattern = new RegExp('^' + location + '(.+)', 'i'); + util.each(r.headers, function (k, v) { + var result = k.match(pattern); + if (result !== null) { + data[name][result[1]] = v; + } + }); + } else if (member.location === 'header') { + if (headers[header] !== undefined) { + var value = member.isJsonValue ? 
+          util.base64.decode(headers[header]) :
+          headers[header];
+        data[name] = member.toType(value);
+      }
+    } else if (member.location === 'statusCode') {
+      data[name] = parseInt(r.statusCode, 10);
+    }
+  });
+
+  resp.data = data;
+}
+
+/**
+ * @api private
+ */
+module.exports = {
+  buildRequest: buildRequest,
+  extractError: extractError,
+  extractData: extractData,
+  generateURI: generateURI
+};
+
+
+/***/ }),
+
+/***/ 5883:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437); // provides AWS.HttpClient used in extractData below
+var util = __nccwpck_require__(77985);
+var Rest = __nccwpck_require__(98200);
+var Json = __nccwpck_require__(30083);
+var JsonBuilder = __nccwpck_require__(47495);
+var JsonParser = __nccwpck_require__(5474);
+
+var METHODS_WITHOUT_BODY = ['GET', 'HEAD', 'DELETE'];
+
+function unsetContentLength(req) {
+  var payloadMember = util.getRequestPayloadShape(req);
+  if (
+    payloadMember === undefined &&
+    METHODS_WITHOUT_BODY.indexOf(req.httpRequest.method) >= 0
+  ) {
+    delete req.httpRequest.headers['Content-Length'];
+  }
+}
+
+function populateBody(req) {
+  var builder = new JsonBuilder();
+  var input = req.service.api.operations[req.operation].input;
+
+  if (input.payload) {
+    var params = {};
+    var payloadShape = input.members[input.payload];
+    params = req.params[input.payload];
+
+    if (payloadShape.type === 'structure') {
+      req.httpRequest.body = builder.build(params || {}, payloadShape);
+      applyContentTypeHeader(req);
+    } else if (params !== undefined) {
+      // non-JSON payload
+      req.httpRequest.body = params;
+      if (payloadShape.type === 'binary' || payloadShape.isStreaming) {
+        applyContentTypeHeader(req, true);
+      }
+    }
+  } else {
+    req.httpRequest.body = builder.build(req.params, input);
+    applyContentTypeHeader(req);
+  }
+}
+
+function applyContentTypeHeader(req, isBinary) {
+  if (!req.httpRequest.headers['Content-Type']) {
+    var type = isBinary ? 'binary/octet-stream' : 'application/json';
+    req.httpRequest.headers['Content-Type'] = type;
+  }
+}
+
+function buildRequest(req) {
+  Rest.buildRequest(req);
+
+  // never send body payload on GET/HEAD/DELETE
+  if (METHODS_WITHOUT_BODY.indexOf(req.httpRequest.method) < 0) {
+    populateBody(req);
+  }
+}
+
+function extractError(resp) {
+  Json.extractError(resp);
+}
+
+function extractData(resp) {
+  Rest.extractData(resp);
+
+  var req = resp.request;
+  var operation = req.service.api.operations[req.operation];
+  var rules = req.service.api.operations[req.operation].output || {};
+  var parser;
+  var hasEventOutput = operation.hasEventOutput;
+
+  if (rules.payload) {
+    var payloadMember = rules.members[rules.payload];
+    var body = resp.httpResponse.body;
+    if (payloadMember.isEventStream) {
+      parser = new JsonParser();
+      resp.data[rules.payload] = util.createEventStream(
+        AWS.HttpClient.streamsApiVersion === 2 ?
resp.httpResponse.stream : body, + parser, + payloadMember + ); + } else if (payloadMember.type === 'structure' || payloadMember.type === 'list') { + var parser = new JsonParser(); + resp.data[rules.payload] = parser.parse(body, payloadMember); + } else if (payloadMember.type === 'binary' || payloadMember.isStreaming) { + resp.data[rules.payload] = body; + } else { + resp.data[rules.payload] = payloadMember.toType(body); + } + } else { + var data = resp.data; + Json.extractData(resp); + resp.data = util.merge(data, resp.data); + } +} + +/** + * @api private + */ +module.exports = { + buildRequest: buildRequest, + extractError: extractError, + extractData: extractData, + unsetContentLength: unsetContentLength +}; + + +/***/ }), + +/***/ 15143: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var util = __nccwpck_require__(77985); +var Rest = __nccwpck_require__(98200); + +function populateBody(req) { + var input = req.service.api.operations[req.operation].input; + var builder = new AWS.XML.Builder(); + var params = req.params; + + var payload = input.payload; + if (payload) { + var payloadMember = input.members[payload]; + params = params[payload]; + if (params === undefined) return; + + if (payloadMember.type === 'structure') { + var rootElement = payloadMember.name; + req.httpRequest.body = builder.toXML(params, payloadMember, rootElement, true); + } else { // non-xml payload + req.httpRequest.body = params; + } + } else { + req.httpRequest.body = builder.toXML(params, input, input.name || + input.shape || util.string.upperFirst(req.operation) + 'Request'); + } +} + +function buildRequest(req) { + Rest.buildRequest(req); + + // never send body payload on GET/HEAD + if (['GET', 'HEAD'].indexOf(req.httpRequest.method) < 0) { + populateBody(req); + } +} + +function extractError(resp) { + Rest.extractError(resp); + + var data; + try { + data = new AWS.XML.Parser().parse(resp.httpResponse.body.toString()); + } catch (e) { + data = { + Code: resp.httpResponse.statusCode, + Message: resp.httpResponse.statusMessage + }; + } + + if (data.Errors) data = data.Errors; + if (data.Error) data = data.Error; + if (data.Code) { + resp.error = util.error(new Error(), { + code: data.Code, + message: data.Message + }); + } else { + resp.error = util.error(new Error(), { + code: resp.httpResponse.statusCode, + message: null + }); + } +} + +function extractData(resp) { + Rest.extractData(resp); + + var parser; + var req = resp.request; + var body = resp.httpResponse.body; + var operation = req.service.api.operations[req.operation]; + var output = operation.output; + + var hasEventOutput = operation.hasEventOutput; + + var payload = output.payload; + if (payload) { + var payloadMember = output.members[payload]; + if (payloadMember.isEventStream) { + parser = new AWS.XML.Parser(); + resp.data[payload] = util.createEventStream( + AWS.HttpClient.streamsApiVersion === 2 ? 
resp.httpResponse.stream : resp.httpResponse.body, + parser, + payloadMember + ); + } else if (payloadMember.type === 'structure') { + parser = new AWS.XML.Parser(); + resp.data[payload] = parser.parse(body.toString(), payloadMember); + } else if (payloadMember.type === 'binary' || payloadMember.isStreaming) { + resp.data[payload] = body; + } else { + resp.data[payload] = payloadMember.toType(body); + } + } else if (body.length > 0) { + parser = new AWS.XML.Parser(); + var data = parser.parse(body.toString(), output); + util.update(resp.data, data); + } +} + +/** + * @api private + */ +module.exports = { + buildRequest: buildRequest, + extractError: extractError, + extractData: extractData +}; + + +/***/ }), + +/***/ 91822: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +/** + * Resolve client-side monitoring configuration from either environmental variables + * or shared config file. Configurations from environmental variables have higher priority + * than those from shared config file. The resolver will try to read the shared config file + * no matter whether the AWS_SDK_LOAD_CONFIG variable is set. + * @api private + */ +function resolveMonitoringConfig() { + var config = { + port: undefined, + clientId: undefined, + enabled: undefined, + host: undefined + }; + if (fromEnvironment(config) || fromConfigFile(config)) return toJSType(config); + return toJSType(config); +} + +/** + * Resolve configurations from environmental variables. + * @param {object} client side monitoring config object needs to be resolved + * @returns {boolean} whether resolving configurations is done + * @api private + */ +function fromEnvironment(config) { + config.port = config.port || process.env.AWS_CSM_PORT; + config.enabled = config.enabled || process.env.AWS_CSM_ENABLED; + config.clientId = config.clientId || process.env.AWS_CSM_CLIENT_ID; + config.host = config.host || process.env.AWS_CSM_HOST; + return config.port && config.enabled && config.clientId && config.host || + ['false', '0'].indexOf(config.enabled) >= 0; //no need to read shared config file if explicitely disabled +} + +/** + * Resolve cofigurations from shared config file with specified role name + * @param {object} client side monitoring config object needs to be resolved + * @returns {boolean} whether resolving configurations is done + * @api private + */ +function fromConfigFile(config) { + var sharedFileConfig; + try { + var configFile = AWS.util.iniLoader.loadFrom({ + isConfig: true, + filename: process.env[AWS.util.sharedConfigFileEnv] + }); + var sharedFileConfig = configFile[ + process.env.AWS_PROFILE || AWS.util.defaultProfile + ]; + } catch (err) { + return false; + } + if (!sharedFileConfig) return config; + config.port = config.port || sharedFileConfig.csm_port; + config.enabled = config.enabled || sharedFileConfig.csm_enabled; + config.clientId = config.clientId || sharedFileConfig.csm_client_id; + config.host = config.host || sharedFileConfig.csm_host; + return config.port && config.enabled && config.clientId && config.host; +} + +/** + * Transfer the resolved configuration value to proper types: port as number, enabled + * as boolean and clientId as string. The 'enabled' flag is valued to false when set + * to 'false' or '0'. 
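+ *
+ * For example, an input of {port: '31000', enabled: 'TRUE', clientId: 'id'}
+ * (illustrative values) resolves to {port: 31000, enabled: true, clientId: 'id'}.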
+ * @param {object} resolved client side monitoring config + * @api private + */ +function toJSType(config) { + //config.XXX is either undefined or string + var falsyNotations = ['false', '0', undefined]; + if (!config.enabled || falsyNotations.indexOf(config.enabled.toLowerCase()) >= 0) { + config.enabled = false; + } else { + config.enabled = true; + } + config.port = config.port ? parseInt(config.port, 10) : undefined; + return config; +} + +module.exports = resolveMonitoringConfig; + + +/***/ }), + +/***/ 66807: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = (__nccwpck_require__(28437).util); +var dgram = __nccwpck_require__(71891); +var stringToBuffer = util.buffer.toBuffer; + +var MAX_MESSAGE_SIZE = 1024 * 8; // 8 KB + +/** + * Publishes metrics via udp. + * @param {object} options Paramters for Publisher constructor + * @param {number} [options.port = 31000] Port number + * @param {string} [options.clientId = ''] Client Identifier + * @param {boolean} [options.enabled = false] enable sending metrics datagram + * @api private + */ +function Publisher(options) { + // handle configuration + options = options || {}; + this.enabled = options.enabled || false; + this.port = options.port || 31000; + this.clientId = options.clientId || ''; + this.address = options.host || '127.0.0.1'; + if (this.clientId.length > 255) { + // ClientId has a max length of 255 + this.clientId = this.clientId.substr(0, 255); + } + this.messagesInFlight = 0; +} + +Publisher.prototype.fieldsToTrim = { + UserAgent: 256, + SdkException: 128, + SdkExceptionMessage: 512, + AwsException: 128, + AwsExceptionMessage: 512, + FinalSdkException: 128, + FinalSdkExceptionMessage: 512, + FinalAwsException: 128, + FinalAwsExceptionMessage: 512 + +}; + +/** + * Trims fields that have a specified max length. + * @param {object} event ApiCall or ApiCallAttempt event. + * @returns {object} + * @api private + */ +Publisher.prototype.trimFields = function(event) { + var trimmableFields = Object.keys(this.fieldsToTrim); + for (var i = 0, iLen = trimmableFields.length; i < iLen; i++) { + var field = trimmableFields[i]; + if (event.hasOwnProperty(field)) { + var maxLength = this.fieldsToTrim[field]; + var value = event[field]; + if (value && value.length > maxLength) { + event[field] = value.substr(0, maxLength); + } + } + } + return event; +}; + +/** + * Handles ApiCall and ApiCallAttempt events. + * @param {Object} event apiCall or apiCallAttempt event. + * @api private + */ +Publisher.prototype.eventHandler = function(event) { + // set the clientId + event.ClientId = this.clientId; + + this.trimFields(event); + + var message = stringToBuffer(JSON.stringify(event)); + if (!this.enabled || message.length > MAX_MESSAGE_SIZE) { + // drop the message if publisher not enabled or it is too large + return; + } + + this.publishDatagram(message); +}; + +/** + * Publishes message to an agent. + * @param {Buffer} message JSON message to send to agent. + * @api private + */ +Publisher.prototype.publishDatagram = function(message) { + var self = this; + var client = this.getClient(); + + this.messagesInFlight++; + this.client.send(message, 0, message.length, this.port, this.address, function(err, bytes) { + if (--self.messagesInFlight <= 0) { + // destroy existing client so the event loop isn't kept open + self.destroyClient(); + } + }); +}; + +/** + * Returns an existing udp socket, or creates one if it doesn't already exist. 
+ * @api private + */ +Publisher.prototype.getClient = function() { + if (!this.client) { + this.client = dgram.createSocket('udp4'); + } + return this.client; +}; + +/** + * Destroys the udp socket. + * @api private + */ +Publisher.prototype.destroyClient = function() { + if (this.client) { + this.client.close(); + this.client = void 0; + } +}; + +module.exports = { + Publisher: Publisher +}; + + +/***/ }), + +/***/ 45175: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = __nccwpck_require__(77985); + +function QueryParamSerializer() { +} + +QueryParamSerializer.prototype.serialize = function(params, shape, fn) { + serializeStructure('', params, shape, fn); +}; + +function ucfirst(shape) { + if (shape.isQueryName || shape.api.protocol !== 'ec2') { + return shape.name; + } else { + return shape.name[0].toUpperCase() + shape.name.substr(1); + } +} + +function serializeStructure(prefix, struct, rules, fn) { + util.each(rules.members, function(name, member) { + var value = struct[name]; + if (value === null || value === undefined) return; + + var memberName = ucfirst(member); + memberName = prefix ? prefix + '.' + memberName : memberName; + serializeMember(memberName, value, member, fn); + }); +} + +function serializeMap(name, map, rules, fn) { + var i = 1; + util.each(map, function (key, value) { + var prefix = rules.flattened ? '.' : '.entry.'; + var position = prefix + (i++) + '.'; + var keyName = position + (rules.key.name || 'key'); + var valueName = position + (rules.value.name || 'value'); + serializeMember(name + keyName, key, rules.key, fn); + serializeMember(name + valueName, value, rules.value, fn); + }); +} + +function serializeList(name, list, rules, fn) { + var memberRules = rules.member || {}; + + if (list.length === 0) { + fn.call(this, name, null); + return; + } + + util.arrayEach(list, function (v, n) { + var suffix = '.' + (n + 1); + if (rules.api.protocol === 'ec2') { + // Do nothing for EC2 + suffix = suffix + ''; // make linter happy + } else if (rules.flattened) { + if (memberRules.name) { + var parts = name.split('.'); + parts.pop(); + parts.push(ucfirst(memberRules)); + name = parts.join('.'); + } + } else { + suffix = '.' + (memberRules.name ? memberRules.name : 'member') + suffix; + } + serializeMember(name + suffix, v, memberRules, fn); + }); +} + +function serializeMember(name, value, rules, fn) { + if (value === null || value === undefined) return; + if (rules.type === 'structure') { + serializeStructure(name, value, rules, fn); + } else if (rules.type === 'list') { + serializeList(name, value, rules, fn); + } else if (rules.type === 'map') { + serializeMap(name, value, rules, fn); + } else { + fn(name, rules.toWireFormat(value).toString()); + } +} + +/** + * @api private + */ +module.exports = QueryParamSerializer; + + +/***/ }), + +/***/ 16612: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +/** + * @api private + */ +var service = null; + +/** + * @api private + */ +var api = { + signatureVersion: 'v4', + signingName: 'rds-db', + operations: {} +}; + +/** + * @api private + */ +var requiredAuthTokenOptions = { + region: 'string', + hostname: 'string', + port: 'number', + username: 'string' +}; + +/** + * A signer object can be used to generate an auth token to a database. + */ +AWS.RDS.Signer = AWS.util.inherit({ + /** + * Creates a signer object can be used to generate an auth token. 
+ * + * @option options credentials [AWS.Credentials] the AWS credentials + * to sign requests with. Uses the default credential provider chain + * if not specified. + * @option options hostname [String] the hostname of the database to connect to. + * @option options port [Number] the port number the database is listening on. + * @option options region [String] the region the database is located in. + * @option options username [String] the username to login as. + * @example Passing in options to constructor + * var signer = new AWS.RDS.Signer({ + * credentials: new AWS.SharedIniFileCredentials({profile: 'default'}), + * region: 'us-east-1', + * hostname: 'db.us-east-1.rds.amazonaws.com', + * port: 8000, + * username: 'name' + * }); + */ + constructor: function Signer(options) { + this.options = options || {}; + }, + + /** + * @api private + * Strips the protocol from a url. + */ + convertUrlToAuthToken: function convertUrlToAuthToken(url) { + // we are always using https as the protocol + var protocol = 'https://'; + if (url.indexOf(protocol) === 0) { + return url.substring(protocol.length); + } + }, + + /** + * @overload getAuthToken(options = {}, [callback]) + * Generate an auth token to a database. + * @note You must ensure that you have static or previously resolved + * credentials if you call this method synchronously (with no callback), + * otherwise it may not properly sign the request. If you cannot guarantee + * this (you are using an asynchronous credential provider, i.e., EC2 + * IAM roles), you should always call this method with an asynchronous + * callback. + * + * @param options [map] The fields to use when generating an auth token. + * Any options specified here will be merged on top of any options passed + * to AWS.RDS.Signer: + * + * * **credentials** (AWS.Credentials) — the AWS credentials + * to sign requests with. Uses the default credential provider chain + * if not specified. + * * **hostname** (String) — the hostname of the database to connect to. + * * **port** (Number) — the port number the database is listening on. + * * **region** (String) — the region the database is located in. + * * **username** (String) — the username to login as. + * @return [String] if called synchronously (with no callback), returns the + * auth token. + * @return [null] nothing is returned if a callback is provided. + * @callback callback function (err, token) + * If a callback is supplied, it is called when an auth token has been generated. + * @param err [Error] the error object returned from the signer. + * @param token [String] the auth token. 
+ * + * @example Generating an auth token synchronously + * var signer = new AWS.RDS.Signer({ + * // configure options + * region: 'us-east-1', + * username: 'default', + * hostname: 'db.us-east-1.amazonaws.com', + * port: 8000 + * }); + * var token = signer.getAuthToken({ + * // these options are merged with those defined when creating the signer, overriding in the case of a duplicate option + * // credentials are not specified here or when creating the signer, so default credential provider will be used + * username: 'test' // overriding username + * }); + * @example Generating an auth token asynchronously + * var signer = new AWS.RDS.Signer({ + * // configure options + * region: 'us-east-1', + * username: 'default', + * hostname: 'db.us-east-1.amazonaws.com', + * port: 8000 + * }); + * signer.getAuthToken({ + * // these options are merged with those defined when creating the signer, overriding in the case of a duplicate option + * // credentials are not specified here or when creating the signer, so default credential provider will be used + * username: 'test' // overriding username + * }, function(err, token) { + * if (err) { + * // handle error + * } else { + * // use token + * } + * }); + * + */ + getAuthToken: function getAuthToken(options, callback) { + if (typeof options === 'function' && callback === undefined) { + callback = options; + options = {}; + } + var self = this; + var hasCallback = typeof callback === 'function'; + // merge options with existing options + options = AWS.util.merge(this.options, options); + // validate options + var optionsValidation = this.validateAuthTokenOptions(options); + if (optionsValidation !== true) { + if (hasCallback) { + return callback(optionsValidation, null); + } + throw optionsValidation; + } + + // 15 minutes + var expires = 900; + // create service to generate a request from + var serviceOptions = { + region: options.region, + endpoint: new AWS.Endpoint(options.hostname + ':' + options.port), + paramValidation: false, + signatureVersion: 'v4' + }; + if (options.credentials) { + serviceOptions.credentials = options.credentials; + } + service = new AWS.Service(serviceOptions); + // ensure the SDK is using sigv4 signing (config is not enough) + service.api = api; + + var request = service.makeRequest(); + // add listeners to request to properly build auth token + this.modifyRequestForAuthToken(request, options); + + if (hasCallback) { + request.presign(expires, function(err, url) { + if (url) { + url = self.convertUrlToAuthToken(url); + } + callback(err, url); + }); + } else { + var url = request.presign(expires); + return this.convertUrlToAuthToken(url); + } + }, + + /** + * @api private + * Modifies a request to allow the presigner to generate an auth token. + */ + modifyRequestForAuthToken: function modifyRequestForAuthToken(request, options) { + request.on('build', request.buildAsGet); + var httpRequest = request.httpRequest; + httpRequest.body = AWS.util.queryParamsToString({ + Action: 'connect', + DBUser: options.username + }); + }, + + /** + * @api private + * Validates that the options passed in contain all the keys with values of the correct type that + * are needed to generate an auth token. 
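+   *
+   * For example (illustrative): {region: 'us-east-1', hostname: 'db.host',
+   * port: '8000', username: 'name'} fails validation because port is a
+   * string rather than a number.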
+ */ + validateAuthTokenOptions: function validateAuthTokenOptions(options) { + // iterate over all keys in options + var message = ''; + options = options || {}; + for (var key in requiredAuthTokenOptions) { + if (!Object.prototype.hasOwnProperty.call(requiredAuthTokenOptions, key)) { + continue; + } + if (typeof options[key] !== requiredAuthTokenOptions[key]) { + message += 'option \'' + key + '\' should have been type \'' + requiredAuthTokenOptions[key] + '\', was \'' + typeof options[key] + '\'.\n'; + } + } + if (message.length) { + return AWS.util.error(new Error(), { + code: 'InvalidParameter', + message: message + }); + } + return true; + } +}); + + +/***/ }), + +/***/ 81370: +/***/ ((module) => { + +module.exports = { + //provide realtime clock for performance measurement + now: function now() { + var second = process.hrtime(); + return second[0] * 1000 + (second[1] / 1000000); + } +}; + + +/***/ }), + +/***/ 99517: +/***/ ((module) => { + +function isFipsRegion(region) { + return typeof region === 'string' && (region.startsWith('fips-') || region.endsWith('-fips')); +} + +function isGlobalRegion(region) { + return typeof region === 'string' && ['aws-global', 'aws-us-gov-global'].includes(region); +} + +function getRealRegion(region) { + return ['fips-aws-global', 'aws-fips', 'aws-global'].includes(region) + ? 'us-east-1' + : ['fips-aws-us-gov-global', 'aws-us-gov-global'].includes(region) + ? 'us-gov-west-1' + : region.replace(/fips-(dkr-|prod-)?|-fips/, ''); +} + +module.exports = { + isFipsRegion: isFipsRegion, + isGlobalRegion: isGlobalRegion, + getRealRegion: getRealRegion +}; + + +/***/ }), + +/***/ 18262: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = __nccwpck_require__(77985); +var regionConfig = __nccwpck_require__(80738); + +function generateRegionPrefix(region) { + if (!region) return null; + var parts = region.split('-'); + if (parts.length < 3) return null; + return parts.slice(0, parts.length - 2).join('-') + '-*'; +} + +function derivedKeys(service) { + var region = service.config.region; + var regionPrefix = generateRegionPrefix(region); + var endpointPrefix = service.api.endpointPrefix; + + return [ + [region, endpointPrefix], + [regionPrefix, endpointPrefix], + [region, '*'], + [regionPrefix, '*'], + ['*', endpointPrefix], + [region, 'internal-*'], + ['*', '*'] + ].map(function(item) { + return item[0] && item[1] ? item.join('/') : null; + }); +} + +function applyConfig(service, config) { + util.each(config, function(key, value) { + if (key === 'globalEndpoint') return; + if (service.config[key] === undefined || service.config[key] === null) { + service.config[key] = value; + } + }); +} + +function configureEndpoint(service) { + var keys = derivedKeys(service); + var useFipsEndpoint = service.config.useFipsEndpoint; + var useDualstackEndpoint = service.config.useDualstackEndpoint; + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + if (!key) continue; + + var rules = useFipsEndpoint + ? useDualstackEndpoint + ? regionConfig.dualstackFipsRules + : regionConfig.fipsRules + : useDualstackEndpoint + ? 
regionConfig.dualstackRules + : regionConfig.rules; + + if (Object.prototype.hasOwnProperty.call(rules, key)) { + var config = rules[key]; + if (typeof config === 'string') { + config = regionConfig.patterns[config]; + } + + // set global endpoint + service.isGlobalEndpoint = !!config.globalEndpoint; + if (config.signingRegion) { + service.signingRegion = config.signingRegion; + } + + // signature version + if (!config.signatureVersion) { + // Note: config is a global object and should not be mutated here. + // However, we are retaining this line for backwards compatibility. + // The non-v4 signatureVersion will be set in a copied object below. + config.signatureVersion = 'v4'; + } + + var useBearer = (service.api && service.api.signatureVersion) === 'bearer'; + + // merge config + applyConfig(service, Object.assign( + {}, + config, + { signatureVersion: useBearer ? 'bearer' : config.signatureVersion } + )); + return; + } + } +} + +function getEndpointSuffix(region) { + var regionRegexes = { + '^(us|eu|ap|sa|ca|me)\\-\\w+\\-\\d+$': 'amazonaws.com', + '^cn\\-\\w+\\-\\d+$': 'amazonaws.com.cn', + '^us\\-gov\\-\\w+\\-\\d+$': 'amazonaws.com', + '^us\\-iso\\-\\w+\\-\\d+$': 'c2s.ic.gov', + '^us\\-isob\\-\\w+\\-\\d+$': 'sc2s.sgov.gov' + }; + var defaultSuffix = 'amazonaws.com'; + var regexes = Object.keys(regionRegexes); + for (var i = 0; i < regexes.length; i++) { + var regionPattern = RegExp(regexes[i]); + var dnsSuffix = regionRegexes[regexes[i]]; + if (regionPattern.test(region)) return dnsSuffix; + } + return defaultSuffix; +} + +/** + * @api private + */ +module.exports = { + configureEndpoint: configureEndpoint, + getEndpointSuffix: getEndpointSuffix, +}; + + +/***/ }), + +/***/ 78652: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var AcceptorStateMachine = __nccwpck_require__(68118); +var inherit = AWS.util.inherit; +var domain = AWS.util.domain; +var jmespath = __nccwpck_require__(87783); + +/** + * @api private + */ +var hardErrorStates = {success: 1, error: 1, complete: 1}; + +function isTerminalState(machine) { + return Object.prototype.hasOwnProperty.call(hardErrorStates, machine._asm.currentState); +} + +var fsm = new AcceptorStateMachine(); +fsm.setupStates = function() { + var transition = function(_, done) { + var self = this; + self._haltHandlersOnError = false; + + self.emit(self._asm.currentState, function(err) { + if (err) { + if (isTerminalState(self)) { + if (domain && self.domain instanceof domain.Domain) { + err.domainEmitter = self; + err.domain = self.domain; + err.domainThrown = false; + self.domain.emit('error', err); + } else { + throw err; + } + } else { + self.response.error = err; + done(err); + } + } else { + done(self.response.error); + } + }); + + }; + + this.addState('validate', 'build', 'error', transition); + this.addState('build', 'afterBuild', 'restart', transition); + this.addState('afterBuild', 'sign', 'restart', transition); + this.addState('sign', 'send', 'retry', transition); + this.addState('retry', 'afterRetry', 'afterRetry', transition); + this.addState('afterRetry', 'sign', 'error', transition); + this.addState('send', 'validateResponse', 'retry', transition); + this.addState('validateResponse', 'extractData', 'extractError', transition); + this.addState('extractError', 'extractData', 'retry', transition); + this.addState('extractData', 'success', 'retry', transition); + this.addState('restart', 'build', 'error', transition); + this.addState('success', 'complete', 
'complete', transition); + this.addState('error', 'complete', 'complete', transition); + this.addState('complete', null, null, transition); +}; +fsm.setupStates(); + +/** + * ## Asynchronous Requests + * + * All requests made through the SDK are asynchronous and use a + * callback interface. Each service method that kicks off a request + * returns an `AWS.Request` object that you can use to register + * callbacks. + * + * For example, the following service method returns the request + * object as "request", which can be used to register callbacks: + * + * ```javascript + * // request is an AWS.Request object + * var request = ec2.describeInstances(); + * + * // register callbacks on request to retrieve response data + * request.on('success', function(response) { + * console.log(response.data); + * }); + * ``` + * + * When a request is ready to be sent, the {send} method should + * be called: + * + * ```javascript + * request.send(); + * ``` + * + * Since registered callbacks may or may not be idempotent, requests should only + * be sent once. To perform the same operation multiple times, you will need to + * create multiple request objects, each with its own registered callbacks. + * + * ## Removing Default Listeners for Events + * + * Request objects are built with default listeners for the various events, + * depending on the service type. In some cases, you may want to remove + * some built-in listeners to customize behaviour. Doing this requires + * access to the built-in listener functions, which are exposed through + * the {AWS.EventListeners.Core} namespace. For instance, you may + * want to customize the HTTP handler used when sending a request. In this + * case, you can remove the built-in listener associated with the 'send' + * event, the {AWS.EventListeners.Core.SEND} listener and add your own. + * + * ## Multiple Callbacks and Chaining + * + * You can register multiple callbacks on any request object. The + * callbacks can be registered for different events, or all for the + * same event. In addition, you can chain callback registration, for + * example: + * + * ```javascript + * request. + * on('success', function(response) { + * console.log("Success!"); + * }). + * on('error', function(error, response) { + * console.log("Error!"); + * }). + * on('complete', function(response) { + * console.log("Always!"); + * }). + * send(); + * ``` + * + * The above example will print either "Success! Always!", or "Error! Always!", + * depending on whether the request succeeded or not. + * + * @!attribute httpRequest + * @readonly + * @!group HTTP Properties + * @return [AWS.HttpRequest] the raw HTTP request object + * containing request headers and body information + * sent by the service. + * + * @!attribute startTime + * @readonly + * @!group Operation Properties + * @return [Date] the time that the request started + * + * @!group Request Building Events + * + * @!event validate(request) + * Triggered when a request is being validated. Listeners + * should throw an error if the request should not be sent. + * @param request [Request] the request object being sent + * @see AWS.EventListeners.Core.VALIDATE_CREDENTIALS + * @see AWS.EventListeners.Core.VALIDATE_REGION + * @example Ensuring that a certain parameter is set before sending a request + * var req = s3.putObject(params); + * req.on('validate', function() { + * if (!req.params.Body.match(/^Hello\s/)) { + * throw new Error('Body must start with "Hello "'); + * } + * }); + * req.send(function(err, data) { ... 
}); + * + * @!event build(request) + * Triggered when the request payload is being built. Listeners + * should fill the necessary information to send the request + * over HTTP. + * @param (see AWS.Request~validate) + * @example Add a custom HTTP header to a request + * var req = s3.putObject(params); + * req.on('build', function() { + * req.httpRequest.headers['Custom-Header'] = 'value'; + * }); + * req.send(function(err, data) { ... }); + * + * @!event sign(request) + * Triggered when the request is being signed. Listeners should + * add the correct authentication headers and/or adjust the body, + * depending on the authentication mechanism being used. + * @param (see AWS.Request~validate) + * + * @!group Request Sending Events + * + * @!event send(response) + * Triggered when the request is ready to be sent. Listeners + * should call the underlying transport layer to initiate + * the sending of the request. + * @param response [Response] the response object + * @context [Request] the request object that was sent + * @see AWS.EventListeners.Core.SEND + * + * @!event retry(response) + * Triggered when a request failed and might need to be retried or redirected. + * If the response is retryable, the listener should set the + * `response.error.retryable` property to `true`, and optionally set + * `response.error.retryDelay` to the millisecond delay for the next attempt. + * In the case of a redirect, `response.error.redirect` should be set to + * `true` with `retryDelay` set to an optional delay on the next request. + * + * If a listener decides that a request should not be retried, + * it should set both `retryable` and `redirect` to false. + * + * Note that a retryable error will be retried at most + * {AWS.Config.maxRetries} times (based on the service object's config). + * Similarly, a request that is redirected will only redirect at most + * {AWS.Config.maxRedirects} times. + * + * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * @example Adding a custom retry for a 404 response + * request.on('retry', function(response) { + * // this resource is not yet available, wait 10 seconds to get it again + * if (response.httpResponse.statusCode === 404 && response.error) { + * response.error.retryable = true; // retry this error + * response.error.retryDelay = 10000; // wait 10 seconds + * } + * }); + * + * @!group Data Parsing Events + * + * @!event extractError(response) + * Triggered on all non-2xx requests so that listeners can extract + * error details from the response body. Listeners to this event + * should set the `response.error` property. + * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * + * @!event extractData(response) + * Triggered in successful requests to allow listeners to + * de-serialize the response body into `response.data`. + * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * + * @!group Completion Events + * + * @!event success(response) + * Triggered when the request completed successfully. + * `response.data` will contain the response data and + * `response.error` will be null. + * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * + * @!event error(error, response) + * Triggered when an error occurs at any point during the + * request. `response.error` will contain details about the error + * that occurred. `response.data` will be null. + * @param error [Error] the error object containing details about + * the error that occurred. 
+ * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * + * @!event complete(response) + * Triggered whenever a request cycle completes. `response.error` + * should be checked, since the request may have failed. + * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * + * @!group HTTP Events + * + * @!event httpHeaders(statusCode, headers, response, statusMessage) + * Triggered when headers are sent by the remote server + * @param statusCode [Integer] the HTTP response code + * @param headers [map] the response headers + * @param (see AWS.Request~send) + * @param statusMessage [String] A status message corresponding to the HTTP + * response code + * @context (see AWS.Request~send) + * + * @!event httpData(chunk, response) + * Triggered when data is sent by the remote server + * @param chunk [Buffer] the buffer data containing the next data chunk + * from the server + * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * @see AWS.EventListeners.Core.HTTP_DATA + * + * @!event httpUploadProgress(progress, response) + * Triggered when the HTTP request has uploaded more data + * @param progress [map] An object containing the `loaded` and `total` bytes + * of the request. + * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * @note This event will not be emitted in Node.js 0.8.x. + * + * @!event httpDownloadProgress(progress, response) + * Triggered when the HTTP request has downloaded more data + * @param progress [map] An object containing the `loaded` and `total` bytes + * of the request. + * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * @note This event will not be emitted in Node.js 0.8.x. + * + * @!event httpError(error, response) + * Triggered when the HTTP request failed + * @param error [Error] the error object that was thrown + * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * + * @!event httpDone(response) + * Triggered when the server is finished sending data + * @param (see AWS.Request~send) + * @context (see AWS.Request~send) + * + * @see AWS.Response + */ +AWS.Request = inherit({ + + /** + * Creates a request for an operation on a given service with + * a set of input parameters. + * + * @param service [AWS.Service] the service to perform the operation on + * @param operation [String] the operation to perform on the service + * @param params [Object] parameters to send to the operation. + * See the operation's documentation for the format of the + * parameters. + */ + constructor: function Request(service, operation, params) { + var endpoint = service.endpoint; + var region = service.config.region; + var customUserAgent = service.config.customUserAgent; + + if (service.signingRegion) { + region = service.signingRegion; + } else if (service.isGlobalEndpoint) { + region = 'us-east-1'; + } + + this.domain = domain && domain.active; + this.service = service; + this.operation = operation; + this.params = params || {}; + this.httpRequest = new AWS.HttpRequest(endpoint, region); + this.httpRequest.appendToUserAgent(customUserAgent); + this.startTime = service.getSkewCorrectedDate(); + + this.response = new AWS.Response(this); + this._asm = new AcceptorStateMachine(fsm.states, 'validate'); + this._haltHandlersOnError = false; + + AWS.SequentialExecutor.call(this); + this.emit = this.emitEvent; + }, + + /** + * @!group Sending a Request + */ + + /** + * @overload send(callback = null) + * Sends the request object. 
+ * + * @callback callback function(err, data) + * If a callback is supplied, it is called when a response is returned + * from the service. + * @context [AWS.Request] the request object being sent. + * @param err [Error] the error object returned from the request. + * Set to `null` if the request is successful. + * @param data [Object] the de-serialized data returned from + * the request. Set to `null` if a request error occurs. + * @example Sending a request with a callback + * request = s3.putObject({Bucket: 'bucket', Key: 'key'}); + * request.send(function(err, data) { console.log(err, data); }); + * @example Sending a request with no callback (using event handlers) + * request = s3.putObject({Bucket: 'bucket', Key: 'key'}); + * request.on('complete', function(response) { ... }); // register a callback + * request.send(); + */ + send: function send(callback) { + if (callback) { + // append to user agent + this.httpRequest.appendToUserAgent('callback'); + this.on('complete', function (resp) { + callback.call(resp, resp.error, resp.data); + }); + } + this.runTo(); + + return this.response; + }, + + /** + * @!method promise() + * Sends the request and returns a 'thenable' promise. + * + * Two callbacks can be provided to the `then` method on the returned promise. + * The first callback will be called if the promise is fulfilled, and the second + * callback will be called if the promise is rejected. + * @callback fulfilledCallback function(data) + * Called if the promise is fulfilled. + * @param data [Object] the de-serialized data returned from the request. + * @callback rejectedCallback function(error) + * Called if the promise is rejected. + * @param error [Error] the error object returned from the request. + * @return [Promise] A promise that represents the state of the request. + * @example Sending a request using promises. + * var request = s3.putObject({Bucket: 'bucket', Key: 'key'}); + * var result = request.promise(); + * result.then(function(data) { ... }, function(error) { ... }); + */ + + /** + * @api private + */ + build: function build(callback) { + return this.runTo('send', callback); + }, + + /** + * @api private + */ + runTo: function runTo(state, done) { + this._asm.runTo(state, done, this); + return this; + }, + + /** + * Aborts a request, emitting the error and complete events. + * + * @!macro nobrowser + * @example Aborting a request after sending + * var params = { + * Bucket: 'bucket', Key: 'key', + * Body: Buffer.alloc(1024 * 1024 * 5) // 5MB payload + * }; + * var request = s3.putObject(params); + * request.send(function (err, data) { + * if (err) console.log("Error:", err.code, err.message); + * else console.log(data); + * }); + * + * // abort request in 1 second + * setTimeout(request.abort.bind(request), 1000); + * + * // prints "Error: RequestAbortedError Request aborted by user" + * @return [AWS.Request] the same request object, for chaining. 
+ * @since v1.4.0 + */ + abort: function abort() { + this.removeAllListeners('validateResponse'); + this.removeAllListeners('extractError'); + this.on('validateResponse', function addAbortedError(resp) { + resp.error = AWS.util.error(new Error('Request aborted by user'), { + code: 'RequestAbortedError', retryable: false + }); + }); + + if (this.httpRequest.stream && !this.httpRequest.stream.didCallback) { // abort HTTP stream + this.httpRequest.stream.abort(); + if (this.httpRequest._abortCallback) { + this.httpRequest._abortCallback(); + } else { + this.removeAllListeners('send'); // haven't sent yet, so let's not + } + } + + return this; + }, + + /** + * Iterates over each page of results given a pageable request, calling + * the provided callback with each page of data. After all pages have been + * retrieved, the callback is called with `null` data. + * + * @note This operation can generate multiple requests to a service. + * @example Iterating over multiple pages of objects in an S3 bucket + * var pages = 1; + * s3.listObjects().eachPage(function(err, data) { + * if (err) return; + * console.log("Page", pages++); + * console.log(data); + * }); + * @example Iterating over multiple pages with an asynchronous callback + * s3.listObjects(params).eachPage(function(err, data, done) { + * doSomethingAsyncAndOrExpensive(function() { + * // The next page of results isn't fetched until done is called + * done(); + * }); + * }); + * @callback callback function(err, data, [doneCallback]) + * Called with each page of resulting data from the request. If the + * optional `doneCallback` is provided in the function, it must be called + * when the callback is complete. + * + * @param err [Error] an error object, if an error occurred. + * @param data [Object] a single page of response data. If there is no + * more data, this object will be `null`. + * @param doneCallback [Function] an optional done callback. If this + * argument is defined in the function declaration, it should be called + * when the next page is ready to be retrieved. This is useful for + * controlling serial pagination across asynchronous operations. + * @return [Boolean] if the callback returns `false`, pagination will + * stop. + * + * @see AWS.Request.eachItem + * @see AWS.Response.nextPage + * @since v1.4.0 + */ + eachPage: function eachPage(callback) { + // Make all callbacks async-ish + callback = AWS.util.fn.makeAsync(callback, 3); + + function wrappedCallback(response) { + callback.call(response, response.error, response.data, function (result) { + if (result === false) return; + + if (response.hasNextPage()) { + response.nextPage().on('complete', wrappedCallback).send(); + } else { + callback.call(response, null, null, AWS.util.fn.noop); + } + }); + } + + this.on('complete', wrappedCallback).send(); + }, + + /** + * Enumerates over individual items of a request, paging the responses if + * necessary. 
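+   * @example Iterating over individual S3 objects (illustrative sketch; assumes
+   *   `s3` is an AWS.S3 client and 'bucket' is a placeholder name)
+   *   s3.listObjects({Bucket: 'bucket'}).eachItem(function(err, item) {
+   *     if (err) return console.log(err);
+   *     if (item === null) return console.log('No more items.');
+   *     console.log(item.Key);
+   *   });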
+ * + * @api experimental + * @since v1.4.0 + */ + eachItem: function eachItem(callback) { + var self = this; + function wrappedCallback(err, data) { + if (err) return callback(err, null); + if (data === null) return callback(null, null); + + var config = self.service.paginationConfig(self.operation); + var resultKey = config.resultKey; + if (Array.isArray(resultKey)) resultKey = resultKey[0]; + var items = jmespath.search(data, resultKey); + var continueIteration = true; + AWS.util.arrayEach(items, function(item) { + continueIteration = callback(null, item); + if (continueIteration === false) { + return AWS.util.abort; + } + }); + return continueIteration; + } + + this.eachPage(wrappedCallback); + }, + + /** + * @return [Boolean] whether the operation can return multiple pages of + * response data. + * @see AWS.Response.eachPage + * @since v1.4.0 + */ + isPageable: function isPageable() { + return this.service.paginationConfig(this.operation) ? true : false; + }, + + /** + * Sends the request and converts the request object into a readable stream + * that can be read from or piped into a writable stream. + * + * @note The data read from a readable stream contains only + * the raw HTTP body contents. + * @example Manually reading from a stream + * request.createReadStream().on('data', function(data) { + * console.log("Got data:", data.toString()); + * }); + * @example Piping a request body into a file + * var out = fs.createWriteStream('/path/to/outfile.jpg'); + * s3.service.getObject(params).createReadStream().pipe(out); + * @return [Stream] the readable stream object that can be piped + * or read from (by registering 'data' event listeners). + * @!macro nobrowser + */ + createReadStream: function createReadStream() { + var streams = AWS.util.stream; + var req = this; + var stream = null; + + if (AWS.HttpClient.streamsApiVersion === 2) { + stream = new streams.PassThrough(); + process.nextTick(function() { req.send(); }); + } else { + stream = new streams.Stream(); + stream.readable = true; + + stream.sent = false; + stream.on('newListener', function(event) { + if (!stream.sent && event === 'data') { + stream.sent = true; + process.nextTick(function() { req.send(); }); + } + }); + } + + this.on('error', function(err) { + stream.emit('error', err); + }); + + this.on('httpHeaders', function streamHeaders(statusCode, headers, resp) { + if (statusCode < 300) { + req.removeListener('httpData', AWS.EventListeners.Core.HTTP_DATA); + req.removeListener('httpError', AWS.EventListeners.Core.HTTP_ERROR); + req.on('httpError', function streamHttpError(error) { + resp.error = error; + resp.error.retryable = false; + }); + + var shouldCheckContentLength = false; + var expectedLen; + if (req.httpRequest.method !== 'HEAD') { + expectedLen = parseInt(headers['content-length'], 10); + } + if (expectedLen !== undefined && !isNaN(expectedLen) && expectedLen >= 0) { + shouldCheckContentLength = true; + var receivedLen = 0; + } + + var checkContentLengthAndEmit = function checkContentLengthAndEmit() { + if (shouldCheckContentLength && receivedLen !== expectedLen) { + stream.emit('error', AWS.util.error( + new Error('Stream content length mismatch. 
Received ' + + receivedLen + ' of ' + expectedLen + ' bytes.'), + { code: 'StreamContentLengthMismatch' } + )); + } else if (AWS.HttpClient.streamsApiVersion === 2) { + stream.end(); + } else { + stream.emit('end'); + } + }; + + var httpStream = resp.httpResponse.createUnbufferedStream(); + + if (AWS.HttpClient.streamsApiVersion === 2) { + if (shouldCheckContentLength) { + var lengthAccumulator = new streams.PassThrough(); + lengthAccumulator._write = function(chunk) { + if (chunk && chunk.length) { + receivedLen += chunk.length; + } + return streams.PassThrough.prototype._write.apply(this, arguments); + }; + + lengthAccumulator.on('end', checkContentLengthAndEmit); + stream.on('error', function(err) { + shouldCheckContentLength = false; + httpStream.unpipe(lengthAccumulator); + lengthAccumulator.emit('end'); + lengthAccumulator.end(); + }); + httpStream.pipe(lengthAccumulator).pipe(stream, { end: false }); + } else { + httpStream.pipe(stream); + } + } else { + + if (shouldCheckContentLength) { + httpStream.on('data', function(arg) { + if (arg && arg.length) { + receivedLen += arg.length; + } + }); + } + + httpStream.on('data', function(arg) { + stream.emit('data', arg); + }); + httpStream.on('end', checkContentLengthAndEmit); + } + + httpStream.on('error', function(err) { + shouldCheckContentLength = false; + stream.emit('error', err); + }); + } + }); + + return stream; + }, + + /** + * @param [Array,Response] args This should be the response object, + * or an array of args to send to the event. + * @api private + */ + emitEvent: function emit(eventName, args, done) { + if (typeof args === 'function') { done = args; args = null; } + if (!done) done = function() { }; + if (!args) args = this.eventParameters(eventName, this.response); + + var origEmit = AWS.SequentialExecutor.prototype.emit; + origEmit.call(this, eventName, args, function (err) { + if (err) this.response.error = err; + done.call(this, err); + }); + }, + + /** + * @api private + */ + eventParameters: function eventParameters(eventName) { + switch (eventName) { + case 'restart': + case 'validate': + case 'sign': + case 'build': + case 'afterValidate': + case 'afterBuild': + return [this]; + case 'error': + return [this.response.error, this.response]; + default: + return [this.response]; + } + }, + + /** + * @api private + */ + presign: function presign(expires, callback) { + if (!callback && typeof expires === 'function') { + callback = expires; + expires = null; + } + return new AWS.Signers.Presign().sign(this.toGet(), expires, callback); + }, + + /** + * @api private + */ + isPresigned: function isPresigned() { + return Object.prototype.hasOwnProperty.call(this.httpRequest.headers, 'presigned-expires'); + }, + + /** + * @api private + */ + toUnauthenticated: function toUnauthenticated() { + this._unAuthenticated = true; + this.removeListener('validate', AWS.EventListeners.Core.VALIDATE_CREDENTIALS); + this.removeListener('sign', AWS.EventListeners.Core.SIGN); + return this; + }, + + /** + * @api private + */ + toGet: function toGet() { + if (this.service.api.protocol === 'query' || + this.service.api.protocol === 'ec2') { + this.removeListener('build', this.buildAsGet); + this.addListener('build', this.buildAsGet); + } + return this; + }, + + /** + * @api private + */ + buildAsGet: function buildAsGet(request) { + request.httpRequest.method = 'GET'; + request.httpRequest.path = request.service.endpoint.path + + '?' 
+ request.httpRequest.body; + request.httpRequest.body = ''; + + // don't need these headers on a GET request + delete request.httpRequest.headers['Content-Length']; + delete request.httpRequest.headers['Content-Type']; + }, + + /** + * @api private + */ + haltHandlersOnError: function haltHandlersOnError() { + this._haltHandlersOnError = true; + } +}); + +/** + * @api private + */ +AWS.Request.addPromisesToClass = function addPromisesToClass(PromiseDependency) { + this.prototype.promise = function promise() { + var self = this; + // append to user agent + this.httpRequest.appendToUserAgent('promise'); + return new PromiseDependency(function(resolve, reject) { + self.on('complete', function(resp) { + if (resp.error) { + reject(resp.error); + } else { + // define $response property so that it is not enumerable + // this prevents circular reference errors when stringifying the JSON object + resolve(Object.defineProperty( + resp.data || {}, + '$response', + {value: resp} + )); + } + }); + self.runTo(); + }); + }; +}; + +/** + * @api private + */ +AWS.Request.deletePromisesFromClass = function deletePromisesFromClass() { + delete this.prototype.promise; +}; + +AWS.util.addPromises(AWS.Request); + +AWS.util.mixin(AWS.Request, AWS.SequentialExecutor); + + +/***/ }), + +/***/ 39925: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +/** + * Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). You + * may not use this file except in compliance with the License. A copy of + * the License is located at + * + * http://aws.amazon.com/apache2.0/ + * + * or in the "license" file accompanying this file. This file is + * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF + * ANY KIND, either express or implied. See the License for the specific + * language governing permissions and limitations under the License. + */ + +var AWS = __nccwpck_require__(28437); +var inherit = AWS.util.inherit; +var jmespath = __nccwpck_require__(87783); + +/** + * @api private + */ +function CHECK_ACCEPTORS(resp) { + var waiter = resp.request._waiter; + var acceptors = waiter.config.acceptors; + var acceptorMatched = false; + var state = 'retry'; + + acceptors.forEach(function(acceptor) { + if (!acceptorMatched) { + var matcher = waiter.matchers[acceptor.matcher]; + if (matcher && matcher(resp, acceptor.expected, acceptor.argument)) { + acceptorMatched = true; + state = acceptor.state; + } + } + }); + + if (!acceptorMatched && resp.error) state = 'failure'; + + if (state === 'success') { + waiter.setSuccess(resp); + } else { + waiter.setError(resp, state === 'retry'); + } +} + +/** + * @api private + */ +AWS.ResourceWaiter = inherit({ + /** + * Waits for a given state on a service object + * @param service [Service] the service object to wait on + * @param state [String] the state (defined in waiter configuration) to wait + * for. 
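+   * @example Waiting with per-call overrides (illustrative sketch; assumes `ec2`
+   *   is an AWS.EC2 client and the instance ID is a placeholder; the `$waiter`
+   *   values are read by {wait} to override the configured delay and attempts)
+   *   new AWS.ResourceWaiter(ec2, 'instanceRunning').wait({
+   *     InstanceIds: ['i-1234567890abcdef0'],
+   *     $waiter: {delay: 5, maxAttempts: 60}
+   *   }, function(err, data) {
+   *     if (err) console.log('Instance never reached the running state:', err.message);
+   *     else console.log('Instance is running');
+   *   });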
+ * @example Create a waiter for running EC2 instances + * var ec2 = new AWS.EC2; + * var waiter = new AWS.ResourceWaiter(ec2, 'instanceRunning'); + */ + constructor: function constructor(service, state) { + this.service = service; + this.state = state; + this.loadWaiterConfig(this.state); + }, + + service: null, + + state: null, + + config: null, + + matchers: { + path: function(resp, expected, argument) { + try { + var result = jmespath.search(resp.data, argument); + } catch (err) { + return false; + } + + return jmespath.strictDeepEqual(result,expected); + }, + + pathAll: function(resp, expected, argument) { + try { + var results = jmespath.search(resp.data, argument); + } catch (err) { + return false; + } + + if (!Array.isArray(results)) results = [results]; + var numResults = results.length; + if (!numResults) return false; + for (var ind = 0 ; ind < numResults; ind++) { + if (!jmespath.strictDeepEqual(results[ind], expected)) { + return false; + } + } + return true; + }, + + pathAny: function(resp, expected, argument) { + try { + var results = jmespath.search(resp.data, argument); + } catch (err) { + return false; + } + + if (!Array.isArray(results)) results = [results]; + var numResults = results.length; + for (var ind = 0 ; ind < numResults; ind++) { + if (jmespath.strictDeepEqual(results[ind], expected)) { + return true; + } + } + return false; + }, + + status: function(resp, expected) { + var statusCode = resp.httpResponse.statusCode; + return (typeof statusCode === 'number') && (statusCode === expected); + }, + + error: function(resp, expected) { + if (typeof expected === 'string' && resp.error) { + return expected === resp.error.code; + } + // if expected is not string, can be boolean indicating presence of error + return expected === !!resp.error; + } + }, + + listeners: new AWS.SequentialExecutor().addNamedListeners(function(add) { + add('RETRY_CHECK', 'retry', function(resp) { + var waiter = resp.request._waiter; + if (resp.error && resp.error.code === 'ResourceNotReady') { + resp.error.retryDelay = (waiter.config.delay || 0) * 1000; + } + }); + + add('CHECK_OUTPUT', 'extractData', CHECK_ACCEPTORS); + + add('CHECK_ERROR', 'extractError', CHECK_ACCEPTORS); + }), + + /** + * @return [AWS.Request] + */ + wait: function wait(params, callback) { + if (typeof params === 'function') { + callback = params; params = undefined; + } + + if (params && params.$waiter) { + params = AWS.util.copy(params); + if (typeof params.$waiter.delay === 'number') { + this.config.delay = params.$waiter.delay; + } + if (typeof params.$waiter.maxAttempts === 'number') { + this.config.maxAttempts = params.$waiter.maxAttempts; + } + delete params.$waiter; + } + + var request = this.service.makeRequest(this.config.operation, params); + request._waiter = this; + request.response.maxRetries = this.config.maxAttempts; + request.addListeners(this.listeners); + + if (callback) request.send(callback); + return request; + }, + + setSuccess: function setSuccess(resp) { + resp.error = null; + resp.data = resp.data || {}; + resp.request.removeAllListeners('extractData'); + }, + + setError: function setError(resp, retryable) { + resp.data = null; + resp.error = AWS.util.error(resp.error || new Error(), { + code: 'ResourceNotReady', + message: 'Resource is not in the state ' + this.state, + retryable: retryable + }); + }, + + /** + * Loads waiter configuration from API configuration + * + * @api private + */ + loadWaiterConfig: function loadWaiterConfig(state) { + if (!this.service.api.waiters[state]) { + throw new 
AWS.util.error(new Error(), {
+        code: 'StateNotFoundError',
+        message: 'State ' + state + ' not found.'
+      });
+    }
+
+    this.config = AWS.util.copy(this.service.api.waiters[state]);
+  }
+});
+
+
+/***/ }),
+
+/***/ 58743:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var inherit = AWS.util.inherit;
+var jmespath = __nccwpck_require__(87783);
+
+/**
+ * This class encapsulates the response information
+ * from a service request operation sent through {AWS.Request}.
+ * The response object has two main properties for getting information
+ * back from a request:
+ *
+ * ## The `data` property
+ *
+ * The `response.data` property contains the de-serialized object data
+ * retrieved from the service request. For instance, for an
+ * Amazon DynamoDB `listTables` method call, the response data might
+ * look like:
+ *
+ * ```
+ * > resp.data
+ * { TableNames:
+ *    [ 'table1', 'table2', ... ] }
+ * ```
+ *
+ * The `data` property can be null if an error occurs (see below).
+ *
+ * ## The `error` property
+ *
+ * In the event of a service error (or transfer error), the
+ * `response.error` property will be filled with the given
+ * error data in the form:
+ *
+ * ```
+ * { code: 'SHORT_UNIQUE_ERROR_CODE',
+ *   message: 'Some human readable error message' }
+ * ```
+ *
+ * In the case of an error, the `data` property will be `null`.
+ * Note that if you handle events that can be in a failure state,
+ * you should always check whether `response.error` is set
+ * before attempting to access the `response.data` property.
+ *
+ * @!attribute data
+ *   @readonly
+ *   @!group Data Properties
+ *   @note Inside of a {AWS.Request~httpData} event, this
+ *     property contains a single raw packet instead of the
+ *     full de-serialized service response.
+ *   @return [Object] the de-serialized response data
+ *     from the service.
+ *
+ * @!attribute error
+ *   A structure containing information about a service
+ *   or networking error.
+ *   @readonly
+ *   @!group Data Properties
+ *   @note This attribute is only filled if a service or
+ *     networking error occurs.
+ *   @return [Error]
+ *     * code [String] a unique short code representing the
+ *       error that was emitted.
+ *     * message [String] a longer human readable error message
+ *     * retryable [Boolean] whether the error message is
+ *       retryable.
+ *     * statusCode [Numeric] in the case of a request that reached the service,
+ *       this value contains the response status code.
+ *     * time [Date] the date time object when the error occurred.
+ *     * hostname [String] set when a networking error occurs to easily
+ *       identify the endpoint of the request.
+ *     * region [String] set when a networking error occurs to easily
+ *       identify the region of the request.
+ *
+ * @!attribute requestId
+ *   @readonly
+ *   @!group Data Properties
+ *   @return [String] the unique request ID associated with the response.
+ *     Log this value when debugging requests for AWS support.
+ *
+ * @!attribute retryCount
+ *   @readonly
+ *   @!group Operation Properties
+ *   @return [Integer] the number of retries that were
+ *     attempted before the request was completed.
+ *
+ * @!attribute redirectCount
+ *   @readonly
+ *   @!group Operation Properties
+ *   @return [Integer] the number of redirects that were
+ *     followed before the request was completed.
+ * + * @!attribute httpResponse + * @readonly + * @!group HTTP Properties + * @return [AWS.HttpResponse] the raw HTTP response object + * containing the response headers and body information + * from the server. + * + * @see AWS.Request + */ +AWS.Response = inherit({ + + /** + * @api private + */ + constructor: function Response(request) { + this.request = request; + this.data = null; + this.error = null; + this.retryCount = 0; + this.redirectCount = 0; + this.httpResponse = new AWS.HttpResponse(); + if (request) { + this.maxRetries = request.service.numRetries(); + this.maxRedirects = request.service.config.maxRedirects; + } + }, + + /** + * Creates a new request for the next page of response data, calling the + * callback with the page data if a callback is provided. + * + * @callback callback function(err, data) + * Called when a page of data is returned from the next request. + * + * @param err [Error] an error object, if an error occurred in the request + * @param data [Object] the next page of data, or null, if there are no + * more pages left. + * @return [AWS.Request] the request object for the next page of data + * @return [null] if no callback is provided and there are no pages left + * to retrieve. + * @since v1.4.0 + */ + nextPage: function nextPage(callback) { + var config; + var service = this.request.service; + var operation = this.request.operation; + try { + config = service.paginationConfig(operation, true); + } catch (e) { this.error = e; } + + if (!this.hasNextPage()) { + if (callback) callback(this.error, null); + else if (this.error) throw this.error; + return null; + } + + var params = AWS.util.copy(this.request.params); + if (!this.nextPageTokens) { + return callback ? callback(null, null) : null; + } else { + var inputTokens = config.inputToken; + if (typeof inputTokens === 'string') inputTokens = [inputTokens]; + for (var i = 0; i < inputTokens.length; i++) { + params[inputTokens[i]] = this.nextPageTokens[i]; + } + return service.makeRequest(this.request.operation, params, callback); + } + }, + + /** + * @return [Boolean] whether more pages of data can be returned by further + * requests + * @since v1.4.0 + */ + hasNextPage: function hasNextPage() { + this.cacheNextPageTokens(); + if (this.nextPageTokens) return true; + if (this.nextPageTokens === undefined) return undefined; + else return false; + }, + + /** + * @api private + */ + cacheNextPageTokens: function cacheNextPageTokens() { + if (Object.prototype.hasOwnProperty.call(this, 'nextPageTokens')) return this.nextPageTokens; + this.nextPageTokens = undefined; + + var config = this.request.service.paginationConfig(this.request.operation); + if (!config) return this.nextPageTokens; + + this.nextPageTokens = null; + if (config.moreResults) { + if (!jmespath.search(this.data, config.moreResults)) { + return this.nextPageTokens; + } + } + + var exprs = config.outputToken; + if (typeof exprs === 'string') exprs = [exprs]; + AWS.util.arrayEach.call(this, exprs, function (expr) { + var output = jmespath.search(this.data, expr); + if (output) { + this.nextPageTokens = this.nextPageTokens || []; + this.nextPageTokens.push(output); + } + }); + + return this.nextPageTokens; + } + +}); + + +/***/ }), + +/***/ 81600: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var byteLength = AWS.util.string.byteLength; +var Buffer = AWS.util.Buffer; + +/** + * The managed uploader allows for easy and efficient uploading of buffers, + * blobs, or streams, using a 
configurable amount of concurrency to perform
+ * multipart uploads where possible. This abstraction also enables uploading
+ * streams of unknown size due to the use of multipart uploads.
+ *
+ * To construct a managed upload object, see the {constructor} function.
+ *
+ * ## Tracking upload progress
+ *
+ * The managed upload object can also track progress by attaching an
+ * 'httpUploadProgress' listener to the upload manager. This event is similar
+ * to {AWS.Request~httpUploadProgress} but groups all concurrent upload progress
+ * into a single event. See {AWS.S3.ManagedUpload~httpUploadProgress} for more
+ * information.
+ *
+ * ## Handling Multipart Cleanup
+ *
+ * By default, this class will automatically clean up any multipart uploads
+ * when an individual part upload fails. This behavior can be disabled in order
+ * to manually handle failures by setting the `leavePartsOnError` configuration
+ * option to `true` when initializing the upload object.
+ *
+ * @!event httpUploadProgress(progress)
+ *   Triggered when the uploader has uploaded more data.
+ *   @note The `total` property may not be set if the stream being uploaded has
+ *     not yet finished chunking. In this case the `total` will be undefined
+ *     until the total stream size is known.
+ *   @note This event will not be emitted in Node.js 0.8.x.
+ *   @param progress [map] An object containing the `loaded` and `total` bytes
+ *     of the request and the `key` of the S3 object. Note that `total` may be undefined until the payload
+ *     size is known.
+ *   @context (see AWS.Request~send)
+ */
+AWS.S3.ManagedUpload = AWS.util.inherit({
+  /**
+   * Creates a managed upload object with a set of configuration options.
+   *
+   * @note A "Body" parameter is required to be set prior to calling {send}.
+   * @note In Node.js, sending "Body" as {https://nodejs.org/dist/latest/docs/api/stream.html#stream_object_mode object-mode stream}
+   *   may result in upload hangs. Using a buffer stream is preferable.
+   * @option options params [map] a map of parameters to pass to the upload
+   *   requests. The "Body" parameter is required to be specified either on
+   *   the service or in the params option.
+   * @note ContentMD5 should not be provided when using the managed upload object.
+   *   Instead, setting "computeChecksums" to true will enable automatic ContentMD5 generation
+   *   by the managed upload object.
+   * @option options queueSize [Number] (4) the size of the concurrent queue
+   *   manager to upload parts in parallel. Set to 1 for synchronous uploading
+   *   of parts. Note that the uploader will buffer at most queueSize * partSize
+   *   bytes into memory at any given time.
+   * @option options partSize [Number] (5mb) the size in bytes for each
+   *   individual part to be uploaded. Adjust the part size to ensure the number
+   *   of parts does not exceed {maxTotalParts}. See {minPartSize} for the
+   *   minimum allowed part size.
+   * @option options leavePartsOnError [Boolean] (false) whether to abort the
+   *   multipart upload if an error occurs. Set to true if you want to handle
+   *   failures manually.
+   * @option options service [AWS.S3] an optional S3 service object to use for
+   *   requests. This object might have bound parameters used by the uploader.
+   * @option options tags [Array] The tags to apply to the uploaded object.
+   *   Each tag should have `Key` and `Value` keys.
+ * @example Creating a default uploader for a stream object + * var upload = new AWS.S3.ManagedUpload({ + * params: {Bucket: 'bucket', Key: 'key', Body: stream} + * }); + * @example Creating an uploader with concurrency of 1 and partSize of 10mb + * var upload = new AWS.S3.ManagedUpload({ + * partSize: 10 * 1024 * 1024, queueSize: 1, + * params: {Bucket: 'bucket', Key: 'key', Body: stream} + * }); + * @example Creating an uploader with tags + * var upload = new AWS.S3.ManagedUpload({ + * params: {Bucket: 'bucket', Key: 'key', Body: stream}, + * tags: [{Key: 'tag1', Value: 'value1'}, {Key: 'tag2', Value: 'value2'}] + * }); + * @see send + */ + constructor: function ManagedUpload(options) { + var self = this; + AWS.SequentialExecutor.call(self); + self.body = null; + self.sliceFn = null; + self.callback = null; + self.parts = {}; + self.completeInfo = []; + self.fillQueue = function() { + self.callback(new Error('Unsupported body payload ' + typeof self.body)); + }; + + self.configure(options); + }, + + /** + * @api private + */ + configure: function configure(options) { + options = options || {}; + this.partSize = this.minPartSize; + + if (options.queueSize) this.queueSize = options.queueSize; + if (options.partSize) this.partSize = options.partSize; + if (options.leavePartsOnError) this.leavePartsOnError = true; + if (options.tags) { + if (!Array.isArray(options.tags)) { + throw new Error('Tags must be specified as an array; ' + + typeof options.tags + ' provided.'); + } + this.tags = options.tags; + } + + if (this.partSize < this.minPartSize) { + throw new Error('partSize must be greater than ' + + this.minPartSize); + } + + this.service = options.service; + this.bindServiceObject(options.params); + this.validateBody(); + this.adjustTotalBytes(); + }, + + /** + * @api private + */ + leavePartsOnError: false, + + /** + * @api private + */ + queueSize: 4, + + /** + * @api private + */ + partSize: null, + + /** + * @readonly + * @return [Number] the minimum number of bytes for an individual part + * upload. + */ + minPartSize: 1024 * 1024 * 5, + + /** + * @readonly + * @return [Number] the maximum allowed number of parts in a multipart upload. + */ + maxTotalParts: 10000, + + /** + * Initiates the managed upload for the payload. + * + * @callback callback function(err, data) + * @param err [Error] an error or null if no error occurred. + * @param data [map] The response data from the successful upload: + * * `Location` (String) the URL of the uploaded object + * * `ETag` (String) the ETag of the uploaded object + * * `Bucket` (String) the bucket to which the object was uploaded + * * `Key` (String) the key to which the object was uploaded + * @example Sending a managed upload object + * var params = {Bucket: 'bucket', Key: 'key', Body: stream}; + * var upload = new AWS.S3.ManagedUpload({params: params}); + * upload.send(function(err, data) { + * console.log(err, data); + * }); + */ + send: function(callback) { + var self = this; + self.failed = false; + self.callback = callback || function(err) { if (err) throw err; }; + + var runFill = true; + if (self.sliceFn) { + self.fillQueue = self.fillBuffer; + } else if (AWS.util.isNode()) { + var Stream = AWS.util.stream.Stream; + if (self.body instanceof Stream) { + runFill = false; + self.fillQueue = self.fillStream; + self.partBuffers = []; + self.body. + on('error', function(err) { self.cleanup(err); }). + on('readable', function() { self.fillQueue(); }). 
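+          // 'end' marks the body as fully chunked: flush any remaining
+          // buffered bytes as the final part and, if every part has already
+          // finished uploading, complete the multipart upload immediately.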
+ on('end', function() { + self.isDoneChunking = true; + self.numParts = self.totalPartNumbers; + self.fillQueue.call(self); + + if (self.isDoneChunking && self.totalPartNumbers >= 1 && self.doneParts === self.numParts) { + self.finishMultiPart(); + } + }); + } + } + + if (runFill) self.fillQueue.call(self); + }, + + /** + * @!method promise() + * Returns a 'thenable' promise. + * + * Two callbacks can be provided to the `then` method on the returned promise. + * The first callback will be called if the promise is fulfilled, and the second + * callback will be called if the promise is rejected. + * @callback fulfilledCallback function(data) + * Called if the promise is fulfilled. + * @param data [map] The response data from the successful upload: + * `Location` (String) the URL of the uploaded object + * `ETag` (String) the ETag of the uploaded object + * `Bucket` (String) the bucket to which the object was uploaded + * `Key` (String) the key to which the object was uploaded + * @callback rejectedCallback function(err) + * Called if the promise is rejected. + * @param err [Error] an error or null if no error occurred. + * @return [Promise] A promise that represents the state of the upload request. + * @example Sending an upload request using promises. + * var upload = s3.upload({Bucket: 'bucket', Key: 'key', Body: stream}); + * var promise = upload.promise(); + * promise.then(function(data) { ... }, function(err) { ... }); + */ + + /** + * Aborts a managed upload, including all concurrent upload requests. + * @note By default, calling this function will cleanup a multipart upload + * if one was created. To leave the multipart upload around after aborting + * a request, configure `leavePartsOnError` to `true` in the {constructor}. + * @note Calling {abort} in the browser environment will not abort any requests + * that are already in flight. If a multipart upload was created, any parts + * not yet uploaded will not be sent, and the multipart upload will be cleaned up. + * @example Aborting an upload + * var params = { + * Bucket: 'bucket', Key: 'key', + * Body: Buffer.alloc(1024 * 1024 * 25) // 25MB payload + * }; + * var upload = s3.upload(params); + * upload.send(function (err, data) { + * if (err) console.log("Error:", err.code, err.message); + * else console.log(data); + * }); + * + * // abort request in 1 second + * setTimeout(upload.abort.bind(upload), 1000); + */ + abort: function() { + var self = this; + //abort putObject request + if (self.isDoneChunking === true && self.totalPartNumbers === 1 && self.singlePart) { + self.singlePart.abort(); + } else { + self.cleanup(AWS.util.error(new Error('Request aborted by user'), { + code: 'RequestAbortedError', retryable: false + })); + } + }, + + /** + * @api private + */ + validateBody: function validateBody() { + var self = this; + self.body = self.service.config.params.Body; + if (typeof self.body === 'string') { + self.body = AWS.util.buffer.toBuffer(self.body); + } else if (!self.body) { + throw new Error('params.Body is required'); + } + self.sliceFn = AWS.util.arraySliceFn(self.body); + }, + + /** + * @api private + */ + bindServiceObject: function bindServiceObject(params) { + params = params || {}; + var self = this; + // bind parameters to new service object + if (!self.service) { + self.service = new AWS.S3({params: params}); + } else { + // Create a new S3 client from the supplied client's constructor. 
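+      // Copy its resolved config so the params merged below are bound to a
+      // fresh client and the caller's service object is left unmodified.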
+ var service = self.service; + var config = AWS.util.copy(service.config); + config.signatureVersion = service.getSignatureVersion(); + self.service = new service.constructor.__super__(config); + self.service.config.params = + AWS.util.merge(self.service.config.params || {}, params); + Object.defineProperty(self.service, '_originalConfig', { + get: function() { return service._originalConfig; }, + enumerable: false, + configurable: true + }); + } + }, + + /** + * @api private + */ + adjustTotalBytes: function adjustTotalBytes() { + var self = this; + try { // try to get totalBytes + self.totalBytes = byteLength(self.body); + } catch (e) { } + + // try to adjust partSize if we know payload length + if (self.totalBytes) { + var newPartSize = Math.ceil(self.totalBytes / self.maxTotalParts); + if (newPartSize > self.partSize) self.partSize = newPartSize; + } else { + self.totalBytes = undefined; + } + }, + + /** + * @api private + */ + isDoneChunking: false, + + /** + * @api private + */ + partPos: 0, + + /** + * @api private + */ + totalChunkedBytes: 0, + + /** + * @api private + */ + totalUploadedBytes: 0, + + /** + * @api private + */ + totalBytes: undefined, + + /** + * @api private + */ + numParts: 0, + + /** + * @api private + */ + totalPartNumbers: 0, + + /** + * @api private + */ + activeParts: 0, + + /** + * @api private + */ + doneParts: 0, + + /** + * @api private + */ + parts: null, + + /** + * @api private + */ + completeInfo: null, + + /** + * @api private + */ + failed: false, + + /** + * @api private + */ + multipartReq: null, + + /** + * @api private + */ + partBuffers: null, + + /** + * @api private + */ + partBufferLength: 0, + + /** + * @api private + */ + fillBuffer: function fillBuffer() { + var self = this; + var bodyLen = byteLength(self.body); + + if (bodyLen === 0) { + self.isDoneChunking = true; + self.numParts = 1; + self.nextChunk(self.body); + return; + } + + while (self.activeParts < self.queueSize && self.partPos < bodyLen) { + var endPos = Math.min(self.partPos + self.partSize, bodyLen); + var buf = self.sliceFn.call(self.body, self.partPos, endPos); + self.partPos += self.partSize; + + if (byteLength(buf) < self.partSize || self.partPos === bodyLen) { + self.isDoneChunking = true; + self.numParts = self.totalPartNumbers + 1; + } + self.nextChunk(buf); + } + }, + + /** + * @api private + */ + fillStream: function fillStream() { + var self = this; + if (self.activeParts >= self.queueSize) return; + + var buf = self.body.read(self.partSize - self.partBufferLength) || + self.body.read(); + if (buf) { + self.partBuffers.push(buf); + self.partBufferLength += buf.length; + self.totalChunkedBytes += buf.length; + } + + if (self.partBufferLength >= self.partSize) { + // if we have single buffer we avoid copyfull concat + var pbuf = self.partBuffers.length === 1 ? + self.partBuffers[0] : Buffer.concat(self.partBuffers); + self.partBuffers = []; + self.partBufferLength = 0; + + // if we have more than partSize, push the rest back on the queue + if (pbuf.length > self.partSize) { + var rest = pbuf.slice(self.partSize); + self.partBuffers.push(rest); + self.partBufferLength += rest.length; + pbuf = pbuf.slice(0, self.partSize); + } + + self.nextChunk(pbuf); + } + + if (self.isDoneChunking && !self.isDoneSending) { + // if we have single buffer we avoid copyfull concat + pbuf = self.partBuffers.length === 1 ? 
+ self.partBuffers[0] : Buffer.concat(self.partBuffers); + self.partBuffers = []; + self.partBufferLength = 0; + self.totalBytes = self.totalChunkedBytes; + self.isDoneSending = true; + + if (self.numParts === 0 || pbuf.length > 0) { + self.numParts++; + self.nextChunk(pbuf); + } + } + + self.body.read(0); + }, + + /** + * @api private + */ + nextChunk: function nextChunk(chunk) { + var self = this; + if (self.failed) return null; + + var partNumber = ++self.totalPartNumbers; + if (self.isDoneChunking && partNumber === 1) { + var params = {Body: chunk}; + if (this.tags) { + params.Tagging = this.getTaggingHeader(); + } + var req = self.service.putObject(params); + req._managedUpload = self; + req.on('httpUploadProgress', self.progress).send(self.finishSinglePart); + self.singlePart = req; //save the single part request + return null; + } else if (self.service.config.params.ContentMD5) { + var err = AWS.util.error(new Error('The Content-MD5 you specified is invalid for multi-part uploads.'), { + code: 'InvalidDigest', retryable: false + }); + + self.cleanup(err); + return null; + } + + if (self.completeInfo[partNumber] && self.completeInfo[partNumber].ETag !== null) { + return null; // Already uploaded this part. + } + + self.activeParts++; + if (!self.service.config.params.UploadId) { + + if (!self.multipartReq) { // create multipart + self.multipartReq = self.service.createMultipartUpload(); + self.multipartReq.on('success', function(resp) { + self.service.config.params.UploadId = resp.data.UploadId; + self.multipartReq = null; + }); + self.queueChunks(chunk, partNumber); + self.multipartReq.on('error', function(err) { + self.cleanup(err); + }); + self.multipartReq.send(); + } else { + self.queueChunks(chunk, partNumber); + } + } else { // multipart is created, just send + self.uploadPart(chunk, partNumber); + } + }, + + /** + * @api private + */ + getTaggingHeader: function getTaggingHeader() { + var kvPairStrings = []; + for (var i = 0; i < this.tags.length; i++) { + kvPairStrings.push(AWS.util.uriEscape(this.tags[i].Key) + '=' + + AWS.util.uriEscape(this.tags[i].Value)); + } + + return kvPairStrings.join('&'); + }, + + /** + * @api private + */ + uploadPart: function uploadPart(chunk, partNumber) { + var self = this; + + var partParams = { + Body: chunk, + ContentLength: AWS.util.string.byteLength(chunk), + PartNumber: partNumber + }; + + var partInfo = {ETag: null, PartNumber: partNumber}; + self.completeInfo[partNumber] = partInfo; + + var req = self.service.uploadPart(partParams); + self.parts[partNumber] = req; + req._lastUploadedBytes = 0; + req._managedUpload = self; + req.on('httpUploadProgress', self.progress); + req.send(function(err, data) { + delete self.parts[partParams.PartNumber]; + self.activeParts--; + + if (!err && (!data || !data.ETag)) { + var message = 'No access to ETag property on response.'; + if (AWS.util.isBrowser()) { + message += ' Check CORS configuration to expose ETag header.'; + } + + err = AWS.util.error(new Error(message), { + code: 'ETagMissing', retryable: false + }); + } + if (err) return self.cleanup(err); + //prevent sending part being returned twice (https://github.com/aws/aws-sdk-js/issues/2304) + if (self.completeInfo[partNumber] && self.completeInfo[partNumber].ETag !== null) return null; + partInfo.ETag = data.ETag; + self.doneParts++; + if (self.isDoneChunking && self.doneParts === self.totalPartNumbers) { + self.finishMultiPart(); + } else { + self.fillQueue.call(self); + } + }); + }, + + /** + * @api private + */ + queueChunks: function 
queueChunks(chunk, partNumber) { + var self = this; + self.multipartReq.on('success', function() { + self.uploadPart(chunk, partNumber); + }); + }, + + /** + * @api private + */ + cleanup: function cleanup(err) { + var self = this; + if (self.failed) return; + + // clean up stream + if (typeof self.body.removeAllListeners === 'function' && + typeof self.body.resume === 'function') { + self.body.removeAllListeners('readable'); + self.body.removeAllListeners('end'); + self.body.resume(); + } + + // cleanup multipartReq listeners + if (self.multipartReq) { + self.multipartReq.removeAllListeners('success'); + self.multipartReq.removeAllListeners('error'); + self.multipartReq.removeAllListeners('complete'); + delete self.multipartReq; + } + + if (self.service.config.params.UploadId && !self.leavePartsOnError) { + self.service.abortMultipartUpload().send(); + } else if (self.leavePartsOnError) { + self.isDoneChunking = false; + } + + AWS.util.each(self.parts, function(partNumber, part) { + part.removeAllListeners('complete'); + part.abort(); + }); + + self.activeParts = 0; + self.partPos = 0; + self.numParts = 0; + self.totalPartNumbers = 0; + self.parts = {}; + self.failed = true; + self.callback(err); + }, + + /** + * @api private + */ + finishMultiPart: function finishMultiPart() { + var self = this; + var completeParams = { MultipartUpload: { Parts: self.completeInfo.slice(1) } }; + self.service.completeMultipartUpload(completeParams, function(err, data) { + if (err) { + return self.cleanup(err); + } + + if (data && typeof data.Location === 'string') { + data.Location = data.Location.replace(/%2F/g, '/'); + } + + if (Array.isArray(self.tags)) { + for (var i = 0; i < self.tags.length; i++) { + self.tags[i].Value = String(self.tags[i].Value); + } + self.service.putObjectTagging( + {Tagging: {TagSet: self.tags}}, + function(e, d) { + if (e) { + self.callback(e); + } else { + self.callback(e, data); + } + } + ); + } else { + self.callback(err, data); + } + }); + }, + + /** + * @api private + */ + finishSinglePart: function finishSinglePart(err, data) { + var upload = this.request._managedUpload; + var httpReq = this.request.httpRequest; + var endpoint = httpReq.endpoint; + if (err) return upload.callback(err); + data.Location = + [endpoint.protocol, '//', endpoint.host, httpReq.path].join(''); + data.key = this.request.params.Key; // will stay undocumented + data.Key = this.request.params.Key; + data.Bucket = this.request.params.Bucket; + upload.callback(err, data); + }, + + /** + * @api private + */ + progress: function progress(info) { + var upload = this._managedUpload; + if (this.operation === 'putObject') { + info.part = 1; + info.key = this.params.Key; + } else { + upload.totalUploadedBytes += info.loaded - this._lastUploadedBytes; + this._lastUploadedBytes = info.loaded; + info = { + loaded: upload.totalUploadedBytes, + total: upload.totalBytes, + part: this.params.PartNumber, + key: this.params.Key + }; + } + upload.emit('httpUploadProgress', [info]); + } +}); + +AWS.util.mixin(AWS.S3.ManagedUpload, AWS.SequentialExecutor); + +/** + * @api private + */ +AWS.S3.ManagedUpload.addPromisesToClass = function addPromisesToClass(PromiseDependency) { + this.prototype.promise = AWS.util.promisifyMethod('send', PromiseDependency); +}; + +/** + * @api private + */ +AWS.S3.ManagedUpload.deletePromisesFromClass = function deletePromisesFromClass() { + delete this.prototype.promise; +}; + +AWS.util.addPromises(AWS.S3.ManagedUpload); + +/** + * @api private + */ +module.exports = AWS.S3.ManagedUpload; 
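+
+/**
+ * A minimal usage sketch for the ManagedUpload implemented above
+ * (illustrative only; the bucket, key, and stream names are hypothetical).
+ * It is normally reached through `s3.upload()`, which forwards its second
+ * argument here as the ManagedUpload options:
+ *
+ * ```javascript
+ * var s3 = new AWS.S3();
+ * var upload = s3.upload(
+ *   { Bucket: 'example-bucket', Key: 'big-file', Body: someReadableStream },
+ *   { partSize: 10 * 1024 * 1024, queueSize: 4, leavePartsOnError: false }
+ * );
+ * upload.on('httpUploadProgress', function (p) {
+ *   console.log('uploaded', p.loaded, 'of', p.total || 'unknown', 'bytes');
+ * });
+ * upload.send(function (err, data) {
+ *   if (err) console.log(err);
+ *   else console.log(data.Location);
+ * });
+ * ```
+ *
+ * With bodies of unknown length (the `fillStream` path above), `p.total`
+ * stays undefined until the last chunk has been read from the stream.
+ */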
+
+
+/***/ }),
+
+/***/ 55948:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+
+/**
+ * @api private
+ * @!method on(eventName, callback)
+ *   Registers an event listener callback for the event given by `eventName`.
+ *   Parameters passed to the callback function depend on the individual event
+ *   being triggered. See the event documentation for those parameters.
+ *
+ *   @param eventName [String] the event name to register the listener for
+ *   @param callback [Function] the listener callback function
+ *   @param toHead [Boolean] attach the listener callback to the head of the callback array if set to true.
+ *     Defaults to false.
+ *   @return [AWS.SequentialExecutor] the same object for chaining
+ */
+AWS.SequentialExecutor = AWS.util.inherit({
+
+  constructor: function SequentialExecutor() {
+    this._events = {};
+  },
+
+  /**
+   * @api private
+   */
+  listeners: function listeners(eventName) {
+    return this._events[eventName] ? this._events[eventName].slice(0) : [];
+  },
+
+  on: function on(eventName, listener, toHead) {
+    if (this._events[eventName]) {
+      toHead ?
+        this._events[eventName].unshift(listener) :
+        this._events[eventName].push(listener);
+    } else {
+      this._events[eventName] = [listener];
+    }
+    return this;
+  },
+
+  onAsync: function onAsync(eventName, listener, toHead) {
+    listener._isAsync = true;
+    return this.on(eventName, listener, toHead);
+  },
+
+  removeListener: function removeListener(eventName, listener) {
+    var listeners = this._events[eventName];
+    if (listeners) {
+      var length = listeners.length;
+      var position = -1;
+      for (var i = 0; i < length; ++i) {
+        if (listeners[i] === listener) {
+          position = i;
+        }
+      }
+      if (position > -1) {
+        listeners.splice(position, 1);
+      }
+    }
+    return this;
+  },
+
+  removeAllListeners: function removeAllListeners(eventName) {
+    if (eventName) {
+      delete this._events[eventName];
+    } else {
+      this._events = {};
+    }
+    return this;
+  },
+
+  /**
+   * @api private
+   */
+  emit: function emit(eventName, eventArgs, doneCallback) {
+    if (!doneCallback) doneCallback = function() { };
+    var listeners = this.listeners(eventName);
+    var count = listeners.length;
+    this.callListeners(listeners, eventArgs, doneCallback);
+    return count > 0;
+  },
+
+  /**
+   * @api private
+   */
+  callListeners: function callListeners(listeners, args, doneCallback, prevError) {
+    var self = this;
+    var error = prevError || null;
+
+    function callNextListener(err) {
+      if (err) {
+        error = AWS.util.error(error || new Error(), err);
+        if (self._haltHandlersOnError) {
+          return doneCallback.call(self, error);
+        }
+      }
+      self.callListeners(listeners, args, doneCallback, error);
+    }
+
+    while (listeners.length > 0) {
+      var listener = listeners.shift();
+      if (listener._isAsync) { // asynchronous listener
+        listener.apply(self, args.concat([callNextListener]));
+        return; // stop here, callNextListener will continue
+      } else { // synchronous listener
+        try {
+          listener.apply(self, args);
+        } catch (err) {
+          error = AWS.util.error(error || new Error(), err);
+        }
+        if (error && self._haltHandlersOnError) {
+          doneCallback.call(self, error);
+          return;
+        }
+      }
+    }
+    doneCallback.call(self, error);
+  },
+
+  /**
+   * Adds or copies a set of listeners from another list of
+   * listeners or SequentialExecutor object.
+   *
+   * @param listeners [map<Function>, AWS.SequentialExecutor]
+   *   a list of events and callbacks, or an event emitter object
+   *   containing listeners to add to this emitter object.
+   * @return [AWS.SequentialExecutor] the emitter object, for chaining.
+   * @example Adding listeners from a map of listeners
+   *   emitter.addListeners({
+   *     event1: [function() { ... }, function() { ... }],
+   *     event2: [function() { ... }]
+   *   });
+   *   emitter.emit('event1'); // emitter has event1
+   *   emitter.emit('event2'); // emitter has event2
+   * @example Adding listeners from another emitter object
+   *   var emitter1 = new AWS.SequentialExecutor();
+   *   emitter1.on('event1', function() { ... });
+   *   emitter1.on('event2', function() { ... });
+   *   var emitter2 = new AWS.SequentialExecutor();
+   *   emitter2.addListeners(emitter1);
+   *   emitter2.emit('event1'); // emitter2 has event1
+   *   emitter2.emit('event2'); // emitter2 has event2
+   */
+  addListeners: function addListeners(listeners) {
+    var self = this;
+
+    // extract listeners if parameter is a SequentialExecutor object
+    if (listeners._events) listeners = listeners._events;
+
+    AWS.util.each(listeners, function(event, callbacks) {
+      if (typeof callbacks === 'function') callbacks = [callbacks];
+      AWS.util.arrayEach(callbacks, function(callback) {
+        self.on(event, callback);
+      });
+    });
+
+    return self;
+  },
+
+  /**
+   * Registers an event with {on} and saves the callback handle function
+   * as a property on the emitter object using a given `name`.
+   *
+   * @param name [String] the property name to set on this object containing
+   *   the callback function handle so that the listener can be removed in
+   *   the future.
+   * @param (see on)
+   * @return (see on)
+   * @example Adding a named listener DATA_CALLBACK
+   *   var listener = function() { doSomething(); };
+   *   emitter.addNamedListener('DATA_CALLBACK', 'data', listener);
+   *
+   *   // the following prints: true
+   *   console.log(emitter.DATA_CALLBACK == listener);
+   */
+  addNamedListener: function addNamedListener(name, eventName, callback, toHead) {
+    this[name] = callback;
+    this.addListener(eventName, callback, toHead);
+    return this;
+  },
+
+  /**
+   * @api private
+   */
+  addNamedAsyncListener: function addNamedAsyncListener(name, eventName, callback, toHead) {
+    callback._isAsync = true;
+    return this.addNamedListener(name, eventName, callback, toHead);
+  },
+
+  /**
+   * Helper method to add a set of named listeners using
+   * {addNamedListener}. The callback contains a parameter
+   * with a handle to the `addNamedListener` method.
+   *
+   * @callback callback function(add)
+   *   The callback function is called immediately in order to provide
+   *   the `add` function to the block. This simplifies the addition of
+   *   a large group of named listeners.
+   *   @param add [Function] the {addNamedListener} function to call
+   *     when registering listeners.
+   * @example Adding a set of named listeners
+   *   emitter.addNamedListeners(function(add) {
+   *     add('DATA_CALLBACK', 'data', function() { ... });
+   *     add('OTHER', 'otherEvent', function() { ... });
+   *     add('LAST', 'lastEvent', function() { ... });
+   *   });
+   *
+   *   // these properties are now set:
+   *   emitter.DATA_CALLBACK;
+   *   emitter.OTHER;
+   *   emitter.LAST;
+   */
+  addNamedListeners: function addNamedListeners(callback) {
+    var self = this;
+    callback(
+      function() {
+        self.addNamedListener.apply(self, arguments);
+      },
+      function() {
+        self.addNamedAsyncListener.apply(self, arguments);
+      }
+    );
+    return this;
+  }
+});
+
+/**
+ * {on} is the preferred method.
+ * @api private + */ +AWS.SequentialExecutor.prototype.addListener = AWS.SequentialExecutor.prototype.on; + +/** + * @api private + */ +module.exports = AWS.SequentialExecutor; + + +/***/ }), + +/***/ 68903: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var Api = __nccwpck_require__(17657); +var regionConfig = __nccwpck_require__(18262); + +var inherit = AWS.util.inherit; +var clientCount = 0; +var region_utils = __nccwpck_require__(99517); + +/** + * The service class representing an AWS service. + * + * @class_abstract This class is an abstract class. + * + * @!attribute apiVersions + * @return [Array] the list of API versions supported by this service. + * @readonly + */ +AWS.Service = inherit({ + /** + * Create a new service object with a configuration object + * + * @param config [map] a map of configuration options + */ + constructor: function Service(config) { + if (!this.loadServiceClass) { + throw AWS.util.error(new Error(), + 'Service must be constructed with `new\' operator'); + } + + if (config) { + if (config.region) { + var region = config.region; + if (region_utils.isFipsRegion(region)) { + config.region = region_utils.getRealRegion(region); + config.useFipsEndpoint = true; + } + if (region_utils.isGlobalRegion(region)) { + config.region = region_utils.getRealRegion(region); + } + } + if (typeof config.useDualstack === 'boolean' + && typeof config.useDualstackEndpoint !== 'boolean') { + config.useDualstackEndpoint = config.useDualstack; + } + } + + var ServiceClass = this.loadServiceClass(config || {}); + if (ServiceClass) { + var originalConfig = AWS.util.copy(config); + var svc = new ServiceClass(config); + Object.defineProperty(svc, '_originalConfig', { + get: function() { return originalConfig; }, + enumerable: false, + configurable: true + }); + svc._clientId = ++clientCount; + return svc; + } + this.initialize(config); + }, + + /** + * @api private + */ + initialize: function initialize(config) { + var svcConfig = AWS.config[this.serviceIdentifier]; + this.config = new AWS.Config(AWS.config); + if (svcConfig) this.config.update(svcConfig, true); + if (config) this.config.update(config, true); + + this.validateService(); + if (!this.config.endpoint) regionConfig.configureEndpoint(this); + + this.config.endpoint = this.endpointFromTemplate(this.config.endpoint); + this.setEndpoint(this.config.endpoint); + //enable attaching listeners to service client + AWS.SequentialExecutor.call(this); + AWS.Service.addDefaultMonitoringListeners(this); + if ((this.config.clientSideMonitoring || AWS.Service._clientSideMonitoring) && this.publisher) { + var publisher = this.publisher; + this.addNamedListener('PUBLISH_API_CALL', 'apiCall', function PUBLISH_API_CALL(event) { + process.nextTick(function() {publisher.eventHandler(event);}); + }); + this.addNamedListener('PUBLISH_API_ATTEMPT', 'apiCallAttempt', function PUBLISH_API_ATTEMPT(event) { + process.nextTick(function() {publisher.eventHandler(event);}); + }); + } + }, + + /** + * @api private + */ + validateService: function validateService() { + }, + + /** + * @api private + */ + loadServiceClass: function loadServiceClass(serviceConfig) { + var config = serviceConfig; + if (!AWS.util.isEmpty(this.api)) { + return null; + } else if (config.apiConfig) { + return AWS.Service.defineServiceApi(this.constructor, config.apiConfig); + } else if (!this.constructor.services) { + return null; + } else { + config = new AWS.Config(AWS.config); + config.update(serviceConfig, 
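+      // The 'true' below is Config.update's allowUnknownKeys flag: options
+      // AWS.Config does not itself recognize are kept rather than dropped,
+      // so service-specific settings survive the merge before an API
+      // version is resolved.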
true); + var version = config.apiVersions[this.constructor.serviceIdentifier]; + version = version || config.apiVersion; + return this.getLatestServiceClass(version); + } + }, + + /** + * @api private + */ + getLatestServiceClass: function getLatestServiceClass(version) { + version = this.getLatestServiceVersion(version); + if (this.constructor.services[version] === null) { + AWS.Service.defineServiceApi(this.constructor, version); + } + + return this.constructor.services[version]; + }, + + /** + * @api private + */ + getLatestServiceVersion: function getLatestServiceVersion(version) { + if (!this.constructor.services || this.constructor.services.length === 0) { + throw new Error('No services defined on ' + + this.constructor.serviceIdentifier); + } + + if (!version) { + version = 'latest'; + } else if (AWS.util.isType(version, Date)) { + version = AWS.util.date.iso8601(version).split('T')[0]; + } + + if (Object.hasOwnProperty(this.constructor.services, version)) { + return version; + } + + var keys = Object.keys(this.constructor.services).sort(); + var selectedVersion = null; + for (var i = keys.length - 1; i >= 0; i--) { + // versions that end in "*" are not available on disk and can be + // skipped, so do not choose these as selectedVersions + if (keys[i][keys[i].length - 1] !== '*') { + selectedVersion = keys[i]; + } + if (keys[i].substr(0, 10) <= version) { + return selectedVersion; + } + } + + throw new Error('Could not find ' + this.constructor.serviceIdentifier + + ' API to satisfy version constraint `' + version + '\''); + }, + + /** + * @api private + */ + api: {}, + + /** + * @api private + */ + defaultRetryCount: 3, + + /** + * @api private + */ + customizeRequests: function customizeRequests(callback) { + if (!callback) { + this.customRequestHandler = null; + } else if (typeof callback === 'function') { + this.customRequestHandler = callback; + } else { + throw new Error('Invalid callback type \'' + typeof callback + '\' provided in customizeRequests'); + } + }, + + /** + * Calls an operation on a service with the given input parameters. + * + * @param operation [String] the name of the operation to call on the service. + * @param params [map] a map of input options for the operation + * @callback callback function(err, data) + * If a callback is supplied, it is called when a response is returned + * from the service. + * @param err [Error] the error object returned from the request. + * Set to `null` if the request is successful. + * @param data [Object] the de-serialized data returned from + * the request. Set to `null` if a request error occurs. + */ + makeRequest: function makeRequest(operation, params, callback) { + if (typeof params === 'function') { + callback = params; + params = null; + } + + params = params || {}; + if (this.config.params) { // copy only toplevel bound params + var rules = this.api.operations[operation]; + if (rules) { + params = AWS.util.copy(params); + AWS.util.each(this.config.params, function(key, value) { + if (rules.input.members[key]) { + if (params[key] === undefined || params[key] === null) { + params[key] = value; + } + } + }); + } + } + + var request = new AWS.Request(this, operation, params); + this.addAllRequestListeners(request); + this.attachMonitoringEmitter(request); + if (callback) request.send(callback); + return request; + }, + + /** + * Calls an operation on a service with the given input parameters, without + * any authentication data. This method is useful for "public" API operations. 
+ * + * @param operation [String] the name of the operation to call on the service. + * @param params [map] a map of input options for the operation + * @callback callback function(err, data) + * If a callback is supplied, it is called when a response is returned + * from the service. + * @param err [Error] the error object returned from the request. + * Set to `null` if the request is successful. + * @param data [Object] the de-serialized data returned from + * the request. Set to `null` if a request error occurs. + */ + makeUnauthenticatedRequest: function makeUnauthenticatedRequest(operation, params, callback) { + if (typeof params === 'function') { + callback = params; + params = {}; + } + + var request = this.makeRequest(operation, params).toUnauthenticated(); + return callback ? request.send(callback) : request; + }, + + /** + * Waits for a given state + * + * @param state [String] the state on the service to wait for + * @param params [map] a map of parameters to pass with each request + * @option params $waiter [map] a map of configuration options for the waiter + * @option params $waiter.delay [Number] The number of seconds to wait between + * requests + * @option params $waiter.maxAttempts [Number] The maximum number of requests + * to send while waiting + * @callback callback function(err, data) + * If a callback is supplied, it is called when a response is returned + * from the service. + * @param err [Error] the error object returned from the request. + * Set to `null` if the request is successful. + * @param data [Object] the de-serialized data returned from + * the request. Set to `null` if a request error occurs. + */ + waitFor: function waitFor(state, params, callback) { + var waiter = new AWS.ResourceWaiter(this, state); + return waiter.wait(params, callback); + }, + + /** + * @api private + */ + addAllRequestListeners: function addAllRequestListeners(request) { + var list = [AWS.events, AWS.EventListeners.Core, this.serviceInterface(), + AWS.EventListeners.CorePost]; + for (var i = 0; i < list.length; i++) { + if (list[i]) request.addListeners(list[i]); + } + + // disable parameter validation + if (!this.config.paramValidation) { + request.removeListener('validate', + AWS.EventListeners.Core.VALIDATE_PARAMETERS); + } + + if (this.config.logger) { // add logging events + request.addListeners(AWS.EventListeners.Logger); + } + + this.setupRequestListeners(request); + // call prototype's customRequestHandler + if (typeof this.constructor.prototype.customRequestHandler === 'function') { + this.constructor.prototype.customRequestHandler(request); + } + // call instance's customRequestHandler + if (Object.prototype.hasOwnProperty.call(this, 'customRequestHandler') && typeof this.customRequestHandler === 'function') { + this.customRequestHandler(request); + } + }, + + /** + * Event recording metrics for a whole API call. + * @returns {object} a subset of api call metrics + * @api private + */ + apiCallEvent: function apiCallEvent(request) { + var api = request.service.api.operations[request.operation]; + var monitoringEvent = { + Type: 'ApiCall', + Api: api ? 
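+      // 'api' is undefined when the operation name is absent from the
+      // loaded API model, so the raw operation name is reported instead.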
api.name : request.operation, + Version: 1, + Service: request.service.api.serviceId || request.service.api.endpointPrefix, + Region: request.httpRequest.region, + MaxRetriesExceeded: 0, + UserAgent: request.httpRequest.getUserAgent(), + }; + var response = request.response; + if (response.httpResponse.statusCode) { + monitoringEvent.FinalHttpStatusCode = response.httpResponse.statusCode; + } + if (response.error) { + var error = response.error; + var statusCode = response.httpResponse.statusCode; + if (statusCode > 299) { + if (error.code) monitoringEvent.FinalAwsException = error.code; + if (error.message) monitoringEvent.FinalAwsExceptionMessage = error.message; + } else { + if (error.code || error.name) monitoringEvent.FinalSdkException = error.code || error.name; + if (error.message) monitoringEvent.FinalSdkExceptionMessage = error.message; + } + } + return monitoringEvent; + }, + + /** + * Event recording metrics for an API call attempt. + * @returns {object} a subset of api call attempt metrics + * @api private + */ + apiAttemptEvent: function apiAttemptEvent(request) { + var api = request.service.api.operations[request.operation]; + var monitoringEvent = { + Type: 'ApiCallAttempt', + Api: api ? api.name : request.operation, + Version: 1, + Service: request.service.api.serviceId || request.service.api.endpointPrefix, + Fqdn: request.httpRequest.endpoint.hostname, + UserAgent: request.httpRequest.getUserAgent(), + }; + var response = request.response; + if (response.httpResponse.statusCode) { + monitoringEvent.HttpStatusCode = response.httpResponse.statusCode; + } + if ( + !request._unAuthenticated && + request.service.config.credentials && + request.service.config.credentials.accessKeyId + ) { + monitoringEvent.AccessKey = request.service.config.credentials.accessKeyId; + } + if (!response.httpResponse.headers) return monitoringEvent; + if (request.httpRequest.headers['x-amz-security-token']) { + monitoringEvent.SessionToken = request.httpRequest.headers['x-amz-security-token']; + } + if (response.httpResponse.headers['x-amzn-requestid']) { + monitoringEvent.XAmznRequestId = response.httpResponse.headers['x-amzn-requestid']; + } + if (response.httpResponse.headers['x-amz-request-id']) { + monitoringEvent.XAmzRequestId = response.httpResponse.headers['x-amz-request-id']; + } + if (response.httpResponse.headers['x-amz-id-2']) { + monitoringEvent.XAmzId2 = response.httpResponse.headers['x-amz-id-2']; + } + return monitoringEvent; + }, + + /** + * Add metrics of failed request. + * @api private + */ + attemptFailEvent: function attemptFailEvent(request) { + var monitoringEvent = this.apiAttemptEvent(request); + var response = request.response; + var error = response.error; + if (response.httpResponse.statusCode > 299 ) { + if (error.code) monitoringEvent.AwsException = error.code; + if (error.message) monitoringEvent.AwsExceptionMessage = error.message; + } else { + if (error.code || error.name) monitoringEvent.SdkException = error.code || error.name; + if (error.message) monitoringEvent.SdkExceptionMessage = error.message; + } + return monitoringEvent; + }, + + /** + * Attach listeners to request object to fetch metrics of each request + * and emit data object through \'ApiCall\' and \'ApiCallAttempt\' events. + * @api private + */ + attachMonitoringEmitter: function attachMonitoringEmitter(request) { + var attemptTimestamp; //timestamp marking the beginning of a request attempt + var attemptStartRealTime; //Start time of request attempt. 
Used to calculate attemptLatency
+    var attemptLatency; //latency from request sent out to http response reaching SDK
+    var callStartRealTime; //Start time of API call. Used to calculate API call latency
+    var attemptCount = 0; //request.retryCount is not reliable here
+    var region; //cache the region for each attempt since it can be updated in place (e.g. s3)
+    var callTimestamp; //timestamp when the request is created
+    var self = this;
+    var addToHead = true;
+
+    request.on('validate', function () {
+      callStartRealTime = AWS.util.realClock.now();
+      callTimestamp = Date.now();
+    }, addToHead);
+    request.on('sign', function () {
+      attemptStartRealTime = AWS.util.realClock.now();
+      attemptTimestamp = Date.now();
+      region = request.httpRequest.region;
+      attemptCount++;
+    }, addToHead);
+    request.on('validateResponse', function() {
+      attemptLatency = Math.round(AWS.util.realClock.now() - attemptStartRealTime);
+    });
+    request.addNamedListener('API_CALL_ATTEMPT', 'success', function API_CALL_ATTEMPT() {
+      var apiAttemptEvent = self.apiAttemptEvent(request);
+      apiAttemptEvent.Timestamp = attemptTimestamp;
+      apiAttemptEvent.AttemptLatency = attemptLatency >= 0 ? attemptLatency : 0;
+      apiAttemptEvent.Region = region;
+      self.emit('apiCallAttempt', [apiAttemptEvent]);
+    });
+    request.addNamedListener('API_CALL_ATTEMPT_RETRY', 'retry', function API_CALL_ATTEMPT_RETRY() {
+      var apiAttemptEvent = self.attemptFailEvent(request);
+      apiAttemptEvent.Timestamp = attemptTimestamp;
+      //attemptLatency may not be available if fail before response
+      attemptLatency = attemptLatency ||
+        Math.round(AWS.util.realClock.now() - attemptStartRealTime);
+      apiAttemptEvent.AttemptLatency = attemptLatency >= 0 ? attemptLatency : 0;
+      apiAttemptEvent.Region = region;
+      self.emit('apiCallAttempt', [apiAttemptEvent]);
+    });
+    request.addNamedListener('API_CALL', 'complete', function API_CALL() {
+      var apiCallEvent = self.apiCallEvent(request);
+      apiCallEvent.AttemptCount = attemptCount;
+      if (apiCallEvent.AttemptCount <= 0) return;
+      apiCallEvent.Timestamp = callTimestamp;
+      var latency = Math.round(AWS.util.realClock.now() - callStartRealTime);
+      apiCallEvent.Latency = latency >= 0 ? latency : 0;
+      var response = request.response;
+      if (
+        response.error &&
+        response.error.retryable &&
+        typeof response.retryCount === 'number' &&
+        typeof response.maxRetries === 'number' &&
+        (response.retryCount >= response.maxRetries)
+      ) {
+        apiCallEvent.MaxRetriesExceeded = 1;
+      }
+      self.emit('apiCall', [apiCallEvent]);
+    });
+  },
+
+  /**
+   * Override this method to set up any custom request listeners for each
+   * new request to the service.
+   *
+   * @method_abstract This is an abstract method.
+   */
+  setupRequestListeners: function setupRequestListeners(request) {
+  },
+
+  /**
+   * Gets the signing name for a given request
+   * @api private
+   */
+  getSigningName: function getSigningName() {
+    return this.api.signingName || this.api.endpointPrefix;
+  },
+
+  /**
+   * Gets the signer class for a given request
+   * @api private
+   */
+  getSignerClass: function getSignerClass(request) {
+    var version;
+    // get operation authtype if present
+    var operation = null;
+    var authtype = '';
+    if (request) {
+      var operations = request.service.api.operations || {};
+      operation = operations[request.operation] || null;
+      authtype = operation ?
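+      // The API model may pin an auth type per operation (e.g.
+      // 'v4-unsigned-body' or 'bearer'); as the precedence chain below
+      // shows, it only applies when no signatureVersion is configured.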
operation.authtype : '';
+    }
+    if (this.config.signatureVersion) {
+      version = this.config.signatureVersion;
+    } else if (authtype === 'v4' || authtype === 'v4-unsigned-body') {
+      version = 'v4';
+    } else if (authtype === 'bearer') {
+      version = 'bearer';
+    } else {
+      version = this.api.signatureVersion;
+    }
+    return AWS.Signers.RequestSigner.getVersion(version);
+  },
+
+  /**
+   * @api private
+   */
+  serviceInterface: function serviceInterface() {
+    switch (this.api.protocol) {
+      case 'ec2': return AWS.EventListeners.Query;
+      case 'query': return AWS.EventListeners.Query;
+      case 'json': return AWS.EventListeners.Json;
+      case 'rest-json': return AWS.EventListeners.RestJson;
+      case 'rest-xml': return AWS.EventListeners.RestXml;
+    }
+    if (this.api.protocol) {
+      throw new Error('Invalid service `protocol\' ' +
+        this.api.protocol + ' in API config');
+    }
+  },
+
+  /**
+   * @api private
+   */
+  successfulResponse: function successfulResponse(resp) {
+    return resp.httpResponse.statusCode < 300;
+  },
+
+  /**
+   * How many times a failed request should be retried before giving up.
+   * The defaultRetryCount can be overridden by service classes.
+   *
+   * @api private
+   */
+  numRetries: function numRetries() {
+    if (this.config.maxRetries !== undefined) {
+      return this.config.maxRetries;
+    } else {
+      return this.defaultRetryCount;
+    }
+  },
+
+  /**
+   * @api private
+   */
+  retryDelays: function retryDelays(retryCount, err) {
+    return AWS.util.calculateRetryDelay(retryCount, this.config.retryDelayOptions, err);
+  },
+
+  /**
+   * @api private
+   */
+  retryableError: function retryableError(error) {
+    if (this.timeoutError(error)) return true;
+    if (this.networkingError(error)) return true;
+    if (this.expiredCredentialsError(error)) return true;
+    if (this.throttledError(error)) return true;
+    if (error.statusCode >= 500) return true;
+    return false;
+  },
+
+  /**
+   * @api private
+   */
+  networkingError: function networkingError(error) {
+    return error.code === 'NetworkingError';
+  },
+
+  /**
+   * @api private
+   */
+  timeoutError: function timeoutError(error) {
+    return error.code === 'TimeoutError';
+  },
+
+  /**
+   * @api private
+   */
+  expiredCredentialsError: function expiredCredentialsError(error) {
+    // TODO : this only handles *one* of the expired credential codes
+    return (error.code === 'ExpiredTokenException');
+  },
+
+  /**
+   * @api private
+   */
+  clockSkewError: function clockSkewError(error) {
+    switch (error.code) {
+      case 'RequestTimeTooSkewed':
+      case 'RequestExpired':
+      case 'InvalidSignatureException':
+      case 'SignatureDoesNotMatch':
+      case 'AuthFailure':
+      case 'RequestInTheFuture':
+        return true;
+      default: return false;
+    }
+  },
+
+  /**
+   * @api private
+   */
+  getSkewCorrectedDate: function getSkewCorrectedDate() {
+    return new Date(Date.now() + this.config.systemClockOffset);
+  },
+
+  /**
+   * @api private
+   */
+  applyClockOffset: function applyClockOffset(newServerTime) {
+    if (newServerTime) {
+      this.config.systemClockOffset = newServerTime - Date.now();
+    }
+  },
+
+  /**
+   * @api private
+   */
+  isClockSkewed: function isClockSkewed(newServerTime) {
+    if (newServerTime) {
+      return Math.abs(this.getSkewCorrectedDate().getTime() - newServerTime) >= 300000;
+    }
+  },
+
+  /**
+   * @api private
+   */
+  throttledError: function throttledError(error) {
+    // this logic varies between services
+    if (error.statusCode === 429) return true;
+    switch (error.code) {
+      case 'ProvisionedThroughputExceededException':
+      case 'Throttling':
+      case 'ThrottlingException':
+      case 'RequestLimitExceeded':
+      case
'RequestThrottled': + case 'RequestThrottledException': + case 'TooManyRequestsException': + case 'TransactionInProgressException': //dynamodb + case 'EC2ThrottledException': + return true; + default: + return false; + } + }, + + /** + * @api private + */ + endpointFromTemplate: function endpointFromTemplate(endpoint) { + if (typeof endpoint !== 'string') return endpoint; + + var e = endpoint; + e = e.replace(/\{service\}/g, this.api.endpointPrefix); + e = e.replace(/\{region\}/g, this.config.region); + e = e.replace(/\{scheme\}/g, this.config.sslEnabled ? 'https' : 'http'); + return e; + }, + + /** + * @api private + */ + setEndpoint: function setEndpoint(endpoint) { + this.endpoint = new AWS.Endpoint(endpoint, this.config); + }, + + /** + * @api private + */ + paginationConfig: function paginationConfig(operation, throwException) { + var paginator = this.api.operations[operation].paginator; + if (!paginator) { + if (throwException) { + var e = new Error(); + throw AWS.util.error(e, 'No pagination configuration for ' + operation); + } + return null; + } + + return paginator; + } +}); + +AWS.util.update(AWS.Service, { + + /** + * Adds one method for each operation described in the api configuration + * + * @api private + */ + defineMethods: function defineMethods(svc) { + AWS.util.each(svc.prototype.api.operations, function iterator(method) { + if (svc.prototype[method]) return; + var operation = svc.prototype.api.operations[method]; + if (operation.authtype === 'none') { + svc.prototype[method] = function (params, callback) { + return this.makeUnauthenticatedRequest(method, params, callback); + }; + } else { + svc.prototype[method] = function (params, callback) { + return this.makeRequest(method, params, callback); + }; + } + }); + }, + + /** + * Defines a new Service class using a service identifier and list of versions + * including an optional set of features (functions) to apply to the class + * prototype. + * + * @param serviceIdentifier [String] the identifier for the service + * @param versions [Array] a list of versions that work with this + * service + * @param features [Object] an object to attach to the prototype + * @return [Class] the service class defined by this function. 
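+   *
+   * @example Defining a minimal service class (an illustrative sketch; the
+   *   identifier and version below are hypothetical, not a real AWS service)
+   *     var MyService = AWS.Service.defineService('myservice', ['2020-01-01'], {
+   *       setupRequestListeners: function(request) { }
+   *     });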
+ */ + defineService: function defineService(serviceIdentifier, versions, features) { + AWS.Service._serviceMap[serviceIdentifier] = true; + if (!Array.isArray(versions)) { + features = versions; + versions = []; + } + + var svc = inherit(AWS.Service, features || {}); + + if (typeof serviceIdentifier === 'string') { + AWS.Service.addVersions(svc, versions); + + var identifier = svc.serviceIdentifier || serviceIdentifier; + svc.serviceIdentifier = identifier; + } else { // defineService called with an API + svc.prototype.api = serviceIdentifier; + AWS.Service.defineMethods(svc); + } + AWS.SequentialExecutor.call(this.prototype); + //util.clientSideMonitoring is only available in node + if (!this.prototype.publisher && AWS.util.clientSideMonitoring) { + var Publisher = AWS.util.clientSideMonitoring.Publisher; + var configProvider = AWS.util.clientSideMonitoring.configProvider; + var publisherConfig = configProvider(); + this.prototype.publisher = new Publisher(publisherConfig); + if (publisherConfig.enabled) { + //if csm is enabled in environment, SDK should send all metrics + AWS.Service._clientSideMonitoring = true; + } + } + AWS.SequentialExecutor.call(svc.prototype); + AWS.Service.addDefaultMonitoringListeners(svc.prototype); + return svc; + }, + + /** + * @api private + */ + addVersions: function addVersions(svc, versions) { + if (!Array.isArray(versions)) versions = [versions]; + + svc.services = svc.services || {}; + for (var i = 0; i < versions.length; i++) { + if (svc.services[versions[i]] === undefined) { + svc.services[versions[i]] = null; + } + } + + svc.apiVersions = Object.keys(svc.services).sort(); + }, + + /** + * @api private + */ + defineServiceApi: function defineServiceApi(superclass, version, apiConfig) { + var svc = inherit(superclass, { + serviceIdentifier: superclass.serviceIdentifier + }); + + function setApi(api) { + if (api.isApi) { + svc.prototype.api = api; + } else { + svc.prototype.api = new Api(api, { + serviceIdentifier: superclass.serviceIdentifier + }); + } + } + + if (typeof version === 'string') { + if (apiConfig) { + setApi(apiConfig); + } else { + try { + setApi(AWS.apiLoader(superclass.serviceIdentifier, version)); + } catch (err) { + throw AWS.util.error(err, { + message: 'Could not find API configuration ' + + superclass.serviceIdentifier + '-' + version + }); + } + } + if (!Object.prototype.hasOwnProperty.call(superclass.services, version)) { + superclass.apiVersions = superclass.apiVersions.concat(version).sort(); + } + superclass.services[version] = svc; + } else { + setApi(version); + } + + AWS.Service.defineMethods(svc); + return svc; + }, + + /** + * @api private + */ + hasService: function(identifier) { + return Object.prototype.hasOwnProperty.call(AWS.Service._serviceMap, identifier); + }, + + /** + * @param attachOn attach default monitoring listeners to object + * + * Each monitoring event should be emitted from service client to service constructor prototype and then + * to global service prototype like bubbling up. These default monitoring events listener will transfer + * the monitoring events to the upper layer. 
+ * @api private + */ + addDefaultMonitoringListeners: function addDefaultMonitoringListeners(attachOn) { + attachOn.addNamedListener('MONITOR_EVENTS_BUBBLE', 'apiCallAttempt', function EVENTS_BUBBLE(event) { + var baseClass = Object.getPrototypeOf(attachOn); + if (baseClass._events) baseClass.emit('apiCallAttempt', [event]); + }); + attachOn.addNamedListener('CALL_EVENTS_BUBBLE', 'apiCall', function CALL_EVENTS_BUBBLE(event) { + var baseClass = Object.getPrototypeOf(attachOn); + if (baseClass._events) baseClass.emit('apiCall', [event]); + }); + }, + + /** + * @api private + */ + _serviceMap: {} +}); + +AWS.util.mixin(AWS.Service, AWS.SequentialExecutor); + +/** + * @api private + */ +module.exports = AWS.Service; + + +/***/ }), + +/***/ 4338: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +AWS.util.update(AWS.APIGateway.prototype, { +/** + * Sets the Accept header to application/json. + * + * @api private + */ + setAcceptHeader: function setAcceptHeader(req) { + var httpRequest = req.httpRequest; + if (!httpRequest.headers.Accept) { + httpRequest.headers['Accept'] = 'application/json'; + } + }, + + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + request.addListener('build', this.setAcceptHeader); + if (request.operation === 'getExport') { + var params = request.params || {}; + if (params.exportType === 'swagger') { + request.addListener('extractData', AWS.util.convertPayloadToString); + } + } + } +}); + + + +/***/ }), + +/***/ 95483: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +// pull in CloudFront signer +__nccwpck_require__(93260); + +AWS.util.update(AWS.CloudFront.prototype, { + + setupRequestListeners: function setupRequestListeners(request) { + request.addListener('extractData', AWS.util.hoistPayloadMember); + } + +}); + + +/***/ }), + +/***/ 48571: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +/** + * Constructs a service interface object. Each API operation is exposed as a + * function on service. + * + * ### Sending a Request Using CloudSearchDomain + * + * ```javascript + * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); + * csd.search(params, function (err, data) { + * if (err) console.log(err, err.stack); // an error occurred + * else console.log(data); // successful response + * }); + * ``` + * + * ### Locking the API Version + * + * In order to ensure that the CloudSearchDomain object uses this specific API, + * you can construct the object by passing the `apiVersion` option to the + * constructor: + * + * ```javascript + * var csd = new AWS.CloudSearchDomain({ + * endpoint: 'my.host.tld', + * apiVersion: '2013-01-01' + * }); + * ``` + * + * You can also set the API version globally in `AWS.config.apiVersions` using + * the **cloudsearchdomain** service identifier: + * + * ```javascript + * AWS.config.apiVersions = { + * cloudsearchdomain: '2013-01-01', + * // other service API versions + * }; + * + * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); + * ``` + * + * @note You *must* provide an `endpoint` configuration parameter when + * constructing this service. See {constructor} for more information. + * + * @!method constructor(options = {}) + * Constructs a service object. This object has one method for each + * API operation. 
+ * + * @example Constructing a CloudSearchDomain object + * var csd = new AWS.CloudSearchDomain({endpoint: 'my.host.tld'}); + * @note You *must* provide an `endpoint` when constructing this service. + * @option (see AWS.Config.constructor) + * + * @service cloudsearchdomain + * @version 2013-01-01 + */ +AWS.util.update(AWS.CloudSearchDomain.prototype, { + /** + * @api private + */ + validateService: function validateService() { + if (!this.config.endpoint || this.config.endpoint.indexOf('{') >= 0) { + var msg = 'AWS.CloudSearchDomain requires an explicit ' + + '`endpoint\' configuration option.'; + throw AWS.util.error(new Error(), + {name: 'InvalidEndpoint', message: msg}); + } + }, + + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + request.removeListener('validate', + AWS.EventListeners.Core.VALIDATE_CREDENTIALS + ); + request.onAsync('validate', this.validateCredentials); + request.addListener('validate', this.updateRegion); + if (request.operation === 'search') { + request.addListener('build', this.convertGetToPost); + } + }, + + /** + * @api private + */ + validateCredentials: function(req, done) { + if (!req.service.api.signatureVersion) return done(); // none + req.service.config.getCredentials(function(err) { + if (err) { + req.removeListener('sign', AWS.EventListeners.Core.SIGN); + } + done(); + }); + }, + + /** + * @api private + */ + convertGetToPost: function(request) { + var httpRequest = request.httpRequest; + // convert queries to POST to avoid length restrictions + var path = httpRequest.path.split('?'); + httpRequest.method = 'POST'; + httpRequest.path = path[0]; + httpRequest.body = path[1]; + httpRequest.headers['Content-Length'] = httpRequest.body.length; + httpRequest.headers['Content-Type'] = 'application/x-www-form-urlencoded'; + }, + + /** + * @api private + */ + updateRegion: function updateRegion(request) { + var endpoint = request.httpRequest.endpoint.hostname; + var zones = endpoint.split('.'); + request.httpRequest.region = zones[1] || request.httpRequest.region; + } + +}); + + +/***/ }), + +/***/ 59050: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var rdsutil = __nccwpck_require__(30650); + +/** +* @api private +*/ +var crossRegionOperations = ['createDBCluster', 'copyDBClusterSnapshot']; + +AWS.util.update(AWS.DocDB.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + if ( + crossRegionOperations.indexOf(request.operation) !== -1 && + this.config.params && + this.config.params.SourceRegion && + request.params && + !request.params.SourceRegion + ) { + request.params.SourceRegion = this.config.params.SourceRegion; + } + rdsutil.setupRequestListeners(this, request, crossRegionOperations); + }, +}); + + +/***/ }), + +/***/ 17101: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +__nccwpck_require__(90030); + +AWS.util.update(AWS.DynamoDB.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + if (request.service.config.dynamoDbCrc32) { + request.removeListener('extractData', AWS.EventListeners.Json.EXTRACT_DATA); + request.addListener('extractData', this.checkCrc32); + request.addListener('extractData', AWS.EventListeners.Json.EXTRACT_DATA); + } + }, + + /** + * @api private + */ + checkCrc32: function checkCrc32(resp) { + if 
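+    // Compare the CRC32 that DynamoDB returns in 'x-amz-crc32' against a
+    // locally computed checksum of the body; streaming responses are
+    // skipped, since the full body is never buffered in memory.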
(!resp.httpResponse.streaming && !resp.request.service.crc32IsValid(resp)) { + resp.data = null; + resp.error = AWS.util.error(new Error(), { + code: 'CRC32CheckFailed', + message: 'CRC32 integrity check failed', + retryable: true + }); + resp.request.haltHandlersOnError(); + throw (resp.error); + } + }, + + /** + * @api private + */ + crc32IsValid: function crc32IsValid(resp) { + var crc = resp.httpResponse.headers['x-amz-crc32']; + if (!crc) return true; // no (valid) CRC32 header + return parseInt(crc, 10) === AWS.util.crypto.crc32(resp.httpResponse.body); + }, + + /** + * @api private + */ + defaultRetryCount: 10, + + /** + * @api private + */ + retryDelays: function retryDelays(retryCount, err) { + var retryDelayOptions = AWS.util.copy(this.config.retryDelayOptions); + + if (typeof retryDelayOptions.base !== 'number') { + retryDelayOptions.base = 50; // default for dynamodb + } + var delay = AWS.util.calculateRetryDelay(retryCount, retryDelayOptions, err); + return delay; + } +}); + + +/***/ }), + +/***/ 92501: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +AWS.util.update(AWS.EC2.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + request.removeListener('extractError', AWS.EventListeners.Query.EXTRACT_ERROR); + request.addListener('extractError', this.extractError); + + if (request.operation === 'copySnapshot') { + request.onAsync('validate', this.buildCopySnapshotPresignedUrl); + } + }, + + /** + * @api private + */ + buildCopySnapshotPresignedUrl: function buildCopySnapshotPresignedUrl(req, done) { + if (req.params.PresignedUrl || req._subRequest) { + return done(); + } + + req.params = AWS.util.copy(req.params); + req.params.DestinationRegion = req.service.config.region; + + var config = AWS.util.copy(req.service.config); + delete config.endpoint; + config.region = req.params.SourceRegion; + var svc = new req.service.constructor(config); + var newReq = svc[req.operation](req.params); + newReq._subRequest = true; + newReq.presign(function(err, url) { + if (err) done(err); + else { + req.params.PresignedUrl = url; + done(); + } + }); + }, + + /** + * @api private + */ + extractError: function extractError(resp) { + // EC2 nests the error code and message deeper than other AWS Query services. + var httpResponse = resp.httpResponse; + var data = new AWS.XML.Parser().parse(httpResponse.body.toString() || ''); + if (data.Errors) { + resp.error = AWS.util.error(new Error(), { + code: data.Errors.Error.Code, + message: data.Errors.Error.Message + }); + } else { + resp.error = AWS.util.error(new Error(), { + code: httpResponse.statusCode, + message: null + }); + } + resp.error.requestId = data.RequestID || null; + } +}); + + +/***/ }), + +/***/ 3034: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +AWS.util.update(AWS.EventBridge.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + if (request.operation === 'putEvents') { + var params = request.params || {}; + if (params.EndpointId !== undefined) { + throw new AWS.util.error(new Error(), { + code: 'InvalidParameter', + message: 'EndpointId is not supported in current SDK.\n' + + 'You should consider switching to V3(https://github.com/aws/aws-sdk-js-v3).' 
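+          // EndpointId selects EventBridge multi-region (global) endpoints,
+          // which require SigV4a request signing that this SDK line lacks;
+          // hence the hard error above. (The SigV4a rationale is an
+          // editorial assumption, not stated in this source.)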
+ }); + } + } + }, +}); + + +/***/ }), + +/***/ 14472: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +AWS.util.update(AWS.Glacier.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + if (Array.isArray(request._events.validate)) { + request._events.validate.unshift(this.validateAccountId); + } else { + request.on('validate', this.validateAccountId); + } + request.removeListener('afterBuild', + AWS.EventListeners.Core.COMPUTE_SHA256); + request.on('build', this.addGlacierApiVersion); + request.on('build', this.addTreeHashHeaders); + }, + + /** + * @api private + */ + validateAccountId: function validateAccountId(request) { + if (request.params.accountId !== undefined) return; + request.params = AWS.util.copy(request.params); + request.params.accountId = '-'; + }, + + /** + * @api private + */ + addGlacierApiVersion: function addGlacierApiVersion(request) { + var version = request.service.api.apiVersion; + request.httpRequest.headers['x-amz-glacier-version'] = version; + }, + + /** + * @api private + */ + addTreeHashHeaders: function addTreeHashHeaders(request) { + if (request.params.body === undefined) return; + + var hashes = request.service.computeChecksums(request.params.body); + request.httpRequest.headers['X-Amz-Content-Sha256'] = hashes.linearHash; + + if (!request.httpRequest.headers['x-amz-sha256-tree-hash']) { + request.httpRequest.headers['x-amz-sha256-tree-hash'] = hashes.treeHash; + } + }, + + /** + * @!group Computing Checksums + */ + + /** + * Computes the SHA-256 linear and tree hash checksums for a given + * block of Buffer data. Pass the tree hash of the computed checksums + * as the checksum input to the {completeMultipartUpload} when performing + * a multi-part upload. + * + * @example Calculate checksum of 5.5MB data chunk + * var glacier = new AWS.Glacier(); + * var data = Buffer.alloc(5.5 * 1024 * 1024); + * data.fill('0'); // fill with zeros + * var results = glacier.computeChecksums(data); + * // Result: { linearHash: '68aff0c5a9...', treeHash: '154e26c78f...' } + * @param data [Buffer, String] data to calculate the checksum for + * @return [map] a map containing + * the linearHash and treeHash properties representing hex based digests + * of the respective checksums. 
+ * @see completeMultipartUpload + */ + computeChecksums: function computeChecksums(data) { + if (!AWS.util.Buffer.isBuffer(data)) data = AWS.util.buffer.toBuffer(data); + + var mb = 1024 * 1024; + var hashes = []; + var hash = AWS.util.crypto.createHash('sha256'); + + // build leaf nodes in 1mb chunks + for (var i = 0; i < data.length; i += mb) { + var chunk = data.slice(i, Math.min(i + mb, data.length)); + hash.update(chunk); + hashes.push(AWS.util.crypto.sha256(chunk)); + } + + return { + linearHash: hash.digest('hex'), + treeHash: this.buildHashTree(hashes) + }; + }, + + /** + * @api private + */ + buildHashTree: function buildHashTree(hashes) { + // merge leaf nodes + while (hashes.length > 1) { + var tmpHashes = []; + for (var i = 0; i < hashes.length; i += 2) { + if (hashes[i + 1]) { + var tmpHash = AWS.util.buffer.alloc(64); + tmpHash.write(hashes[i], 0, 32, 'binary'); + tmpHash.write(hashes[i + 1], 32, 32, 'binary'); + tmpHashes.push(AWS.util.crypto.sha256(tmpHash)); + } else { + tmpHashes.push(hashes[i]); + } + } + hashes = tmpHashes; + } + + return AWS.util.crypto.toHex(hashes[0]); + } +}); + + +/***/ }), + +/***/ 27062: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +/** + * @api private + */ +var blobPayloadOutputOps = [ + 'deleteThingShadow', + 'getThingShadow', + 'updateThingShadow' +]; + +/** + * Constructs a service interface object. Each API operation is exposed as a + * function on service. + * + * ### Sending a Request Using IotData + * + * ```javascript + * var iotdata = new AWS.IotData({endpoint: 'my.host.tld'}); + * iotdata.getThingShadow(params, function (err, data) { + * if (err) console.log(err, err.stack); // an error occurred + * else console.log(data); // successful response + * }); + * ``` + * + * ### Locking the API Version + * + * In order to ensure that the IotData object uses this specific API, + * you can construct the object by passing the `apiVersion` option to the + * constructor: + * + * ```javascript + * var iotdata = new AWS.IotData({ + * endpoint: 'my.host.tld', + * apiVersion: '2015-05-28' + * }); + * ``` + * + * You can also set the API version globally in `AWS.config.apiVersions` using + * the **iotdata** service identifier: + * + * ```javascript + * AWS.config.apiVersions = { + * iotdata: '2015-05-28', + * // other service API versions + * }; + * + * var iotdata = new AWS.IotData({endpoint: 'my.host.tld'}); + * ``` + * + * @note You *must* provide an `endpoint` configuration parameter when + * constructing this service. See {constructor} for more information. + * + * @!method constructor(options = {}) + * Constructs a service object. This object has one method for each + * API operation. + * + * @example Constructing a IotData object + * var iotdata = new AWS.IotData({endpoint: 'my.host.tld'}); + * @note You *must* provide an `endpoint` when constructing this service. 
+ * @option (see AWS.Config.constructor) + * + * @service iotdata + * @version 2015-05-28 + */ +AWS.util.update(AWS.IotData.prototype, { + /** + * @api private + */ + validateService: function validateService() { + if (!this.config.endpoint || this.config.endpoint.indexOf('{') >= 0) { + var msg = 'AWS.IotData requires an explicit ' + + '`endpoint\' configuration option.'; + throw AWS.util.error(new Error(), + {name: 'InvalidEndpoint', message: msg}); + } + }, + + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + request.addListener('validateResponse', this.validateResponseBody); + if (blobPayloadOutputOps.indexOf(request.operation) > -1) { + request.addListener('extractData', AWS.util.convertPayloadToString); + } + }, + + /** + * @api private + */ + validateResponseBody: function validateResponseBody(resp) { + var body = resp.httpResponse.body.toString() || '{}'; + var bodyCheck = body.trim(); + if (!bodyCheck || bodyCheck.charAt(0) !== '{') { + resp.httpResponse.body = ''; + } + } + +}); + + +/***/ }), + +/***/ 8452: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +AWS.util.update(AWS.Lambda.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + if (request.operation === 'invoke') { + request.addListener('extractData', AWS.util.convertPayloadToString); + } + } +}); + + + +/***/ }), + +/***/ 19174: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +AWS.util.update(AWS.MachineLearning.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + if (request.operation === 'predict') { + request.addListener('build', this.buildEndpoint); + } + }, + + /** + * Updates request endpoint from PredictEndpoint + * @api private + */ + buildEndpoint: function buildEndpoint(request) { + var url = request.params.PredictEndpoint; + if (url) { + request.httpRequest.endpoint = new AWS.Endpoint(url); + } + } + +}); + + +/***/ }), + +/***/ 73090: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var rdsutil = __nccwpck_require__(30650); + +/** +* @api private +*/ +var crossRegionOperations = ['createDBCluster', 'copyDBClusterSnapshot']; + +AWS.util.update(AWS.Neptune.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + if ( + crossRegionOperations.indexOf(request.operation) !== -1 && + this.config.params && + this.config.params.SourceRegion && + request.params && + !request.params.SourceRegion + ) { + request.params.SourceRegion = this.config.params.SourceRegion; + } + rdsutil.setupRequestListeners(this, request, crossRegionOperations); + }, +}); + + +/***/ }), + +/***/ 71963: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +if (AWS.NeptuneGraph) { + AWS.util.update(AWS.NeptuneGraph.prototype, { + /** + * @api private + */ + validateService: function validateService() { + var msg = 'AWS Neptune Graph is not available in the AWS SDK for JavaScript v2, consider using the AWS SDK for JavaScript v3: https://www.npmjs.com/package/@aws-sdk/client-neptune-graph'; + throw AWS.util.error(new Error(), + {name: 'ServiceExcludedFromV2', message: msg}); + }, + }); +}; + + +/***/ }), + +/***/ 53199: +/***/ 
((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +__nccwpck_require__(44086); + + +/***/ }), + +/***/ 71928: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var rdsutil = __nccwpck_require__(30650); +__nccwpck_require__(16612); + /** + * @api private + */ + var crossRegionOperations = ['copyDBSnapshot', 'createDBInstanceReadReplica', 'createDBCluster', 'copyDBClusterSnapshot', 'startDBInstanceAutomatedBackupsReplication']; + + AWS.util.update(AWS.RDS.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + rdsutil.setupRequestListeners(this, request, crossRegionOperations); + }, + }); + + +/***/ }), + +/***/ 64070: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +AWS.util.update(AWS.RDSDataService.prototype, { + /** + * @return [Boolean] whether the error can be retried + * @api private + */ + retryableError: function retryableError(error) { + if (error.code === 'BadRequestException' && + error.message && + error.message.match(/^Communications link failure/) && + error.statusCode === 400) { + return true; + } else { + var _super = AWS.Service.prototype.retryableError; + return _super.call(this, error); + } + } +}); + + +/***/ }), + +/***/ 30650: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); + +var rdsutil = { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(service, request, crossRegionOperations) { + if (crossRegionOperations.indexOf(request.operation) !== -1 && + request.params.SourceRegion) { + request.params = AWS.util.copy(request.params); + if (request.params.PreSignedUrl || + request.params.SourceRegion === service.config.region) { + delete request.params.SourceRegion; + } else { + var doesParamValidation = !!service.config.paramValidation; + // remove the validate parameters listener so we can re-add it after we build the URL + if (doesParamValidation) { + request.removeListener('validate', AWS.EventListeners.Core.VALIDATE_PARAMETERS); + } + request.onAsync('validate', rdsutil.buildCrossRegionPresignedUrl); + if (doesParamValidation) { + request.addListener('validate', AWS.EventListeners.Core.VALIDATE_PARAMETERS); + } + } + } + }, + + /** + * @api private + */ + buildCrossRegionPresignedUrl: function buildCrossRegionPresignedUrl(req, done) { + var config = AWS.util.copy(req.service.config); + config.region = req.params.SourceRegion; + delete req.params.SourceRegion; + delete config.endpoint; + // relevant params for the operation will already be in req.params + delete config.params; + config.signatureVersion = 'v4'; + var destinationRegion = req.service.config.region; + + var svc = new req.service.constructor(config); + var newReq = svc[req.operation](AWS.util.copy(req.params)); + newReq.on('build', function addDestinationRegionParam(request) { + var httpRequest = request.httpRequest; + httpRequest.params.DestinationRegion = destinationRegion; + httpRequest.body = AWS.util.queryParamsToString(httpRequest.params); + }); + newReq.presign(function(err, url) { + if (err) done(err); + else { + req.params.PreSignedUrl = url; + done(); + } + }); + } +}; + +/** + * @api private + */ +module.exports = rdsutil; + + +/***/ }), + +/***/ 69627: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = 
__nccwpck_require__(28437); + +AWS.util.update(AWS.Route53.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + request.on('build', this.sanitizeUrl); + }, + + /** + * @api private + */ + sanitizeUrl: function sanitizeUrl(request) { + var path = request.httpRequest.path; + request.httpRequest.path = path.replace(/\/%2F\w+%2F/, '/'); + }, + + /** + * @return [Boolean] whether the error can be retried + * @api private + */ + retryableError: function retryableError(error) { + if (error.code === 'PriorRequestNotComplete' && + error.statusCode === 400) { + return true; + } else { + var _super = AWS.Service.prototype.retryableError; + return _super.call(this, error); + } + } +}); + + +/***/ }), + +/***/ 26543: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var v4Credentials = __nccwpck_require__(62660); +var resolveRegionalEndpointsFlag = __nccwpck_require__(85566); +var s3util = __nccwpck_require__(35895); +var regionUtil = __nccwpck_require__(18262); + +// Pull in managed upload extension +__nccwpck_require__(81600); + +/** + * @api private + */ +var operationsWith200StatusCodeError = { + 'completeMultipartUpload': true, + 'copyObject': true, + 'uploadPartCopy': true +}; + +/** + * @api private + */ + var regionRedirectErrorCodes = [ + 'AuthorizationHeaderMalformed', // non-head operations on virtual-hosted global bucket endpoints + 'BadRequest', // head operations on virtual-hosted global bucket endpoints + 'PermanentRedirect', // non-head operations on path-style or regional endpoints + 301 // head operations on path-style or regional endpoints + ]; + +var OBJECT_LAMBDA_SERVICE = 's3-object-lambda'; + +AWS.util.update(AWS.S3.prototype, { + /** + * @api private + */ + getSignatureVersion: function getSignatureVersion(request) { + var defaultApiVersion = this.api.signatureVersion; + var userDefinedVersion = this._originalConfig ? this._originalConfig.signatureVersion : null; + var regionDefinedVersion = this.config.signatureVersion; + var isPresigned = request ? request.isPresigned() : false; + /* + 1) User defined version specified: + a) always return user defined version + 2) No user defined version specified: + a) If not using presigned urls, default to V4 + b) If using presigned urls, default to lowest version the region supports + */ + if (userDefinedVersion) { + userDefinedVersion = userDefinedVersion === 'v2' ? 's3' : userDefinedVersion; + return userDefinedVersion; + } + if (isPresigned !== true) { + defaultApiVersion = 'v4'; + } else if (regionDefinedVersion) { + defaultApiVersion = regionDefinedVersion; + } + return defaultApiVersion; + }, + + /** + * @api private + */ + getSigningName: function getSigningName(req) { + if (req && req.operation === 'writeGetObjectResponse') { + return OBJECT_LAMBDA_SERVICE; + } + + var _super = AWS.Service.prototype.getSigningName; + return (req && req._parsedArn && req._parsedArn.service) + ? 
req._parsedArn.service + : _super.call(this); + }, + + /** + * @api private + */ + getSignerClass: function getSignerClass(request) { + var signatureVersion = this.getSignatureVersion(request); + return AWS.Signers.RequestSigner.getVersion(signatureVersion); + }, + + /** + * @api private + */ + validateService: function validateService() { + var msg; + var messages = []; + + // default to us-east-1 when no region is provided + if (!this.config.region) this.config.region = 'us-east-1'; + + if (!this.config.endpoint && this.config.s3BucketEndpoint) { + messages.push('An endpoint must be provided when configuring ' + + '`s3BucketEndpoint` to true.'); + } + if (messages.length === 1) { + msg = messages[0]; + } else if (messages.length > 1) { + msg = 'Multiple configuration errors:\n' + messages.join('\n'); + } + if (msg) { + throw AWS.util.error(new Error(), + {name: 'InvalidEndpoint', message: msg}); + } + }, + + /** + * @api private + */ + shouldDisableBodySigning: function shouldDisableBodySigning(request) { + var signerClass = this.getSignerClass(); + if (this.config.s3DisableBodySigning === true && signerClass === AWS.Signers.V4 + && request.httpRequest.endpoint.protocol === 'https:') { + return true; + } + return false; + }, + + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + var prependListener = true; + request.addListener('validate', this.validateScheme); + request.addListener('validate', this.validateBucketName, prependListener); + request.addListener('validate', this.optInUsEast1RegionalEndpoint, prependListener); + + request.removeListener('validate', + AWS.EventListeners.Core.VALIDATE_REGION); + request.addListener('build', this.addContentType); + request.addListener('build', this.computeContentMd5); + request.addListener('build', this.computeSseCustomerKeyMd5); + request.addListener('build', this.populateURI); + request.addListener('afterBuild', this.addExpect100Continue); + request.addListener('extractError', this.extractError); + request.addListener('extractData', AWS.util.hoistPayloadMember); + request.addListener('extractData', this.extractData); + request.addListener('extractData', this.extractErrorFrom200Response); + request.addListener('beforePresign', this.prepareSignedUrl); + if (this.shouldDisableBodySigning(request)) { + request.removeListener('afterBuild', AWS.EventListeners.Core.COMPUTE_SHA256); + request.addListener('afterBuild', this.disableBodySigning); + } + //deal with ARNs supplied to Bucket + if (request.operation !== 'createBucket' && s3util.isArnInParam(request, 'Bucket')) { + // avoid duplicate parsing in the future + request._parsedArn = AWS.util.ARN.parse(request.params.Bucket); + + request.removeListener('validate', this.validateBucketName); + request.removeListener('build', this.populateURI); + if (request._parsedArn.service === 's3') { + request.addListener('validate', s3util.validateS3AccessPointArn); + request.addListener('validate', this.validateArnResourceType); + request.addListener('validate', this.validateArnRegion); + } else if (request._parsedArn.service === 's3-outposts') { + request.addListener('validate', s3util.validateOutpostsAccessPointArn); + request.addListener('validate', s3util.validateOutpostsArn); + request.addListener('validate', s3util.validateArnRegion); + } + request.addListener('validate', s3util.validateArnAccount); + request.addListener('validate', s3util.validateArnService); + request.addListener('build', this.populateUriFromAccessPointArn); + request.addListener('build', 
s3util.validatePopulateUriFromArn);
+      return;
+    }
+    //listeners regarding region inference
+    request.addListener('validate', this.validateBucketEndpoint);
+    request.addListener('validate', this.correctBucketRegionFromCache);
+    request.onAsync('extractError', this.requestBucketRegion);
+    if (AWS.util.isBrowser()) {
+      request.onAsync('retry', this.reqRegionForNetworkingError);
+    }
+  },
+
+  /**
+   * @api private
+   */
+  validateScheme: function(req) {
+    var params = req.params,
+        scheme = req.httpRequest.endpoint.protocol,
+        sensitive = params.SSECustomerKey || params.CopySourceSSECustomerKey;
+    if (sensitive && scheme !== 'https:') {
+      var msg = 'Cannot send SSE keys over HTTP. Set \'sslEnabled\'' +
+        ' to \'true\' in your configuration';
+      throw AWS.util.error(new Error(),
+        { code: 'ConfigError', message: msg });
+    }
+  },
+
+  /**
+   * @api private
+   */
+  validateBucketEndpoint: function(req) {
+    if (!req.params.Bucket && req.service.config.s3BucketEndpoint) {
+      var msg = 'Cannot send requests to root API with `s3BucketEndpoint` set.';
+      throw AWS.util.error(new Error(),
+        { code: 'ConfigError', message: msg });
+    }
+  },
+
+  /**
+   * @api private
+   */
+  validateArnRegion: function validateArnRegion(req) {
+    s3util.validateArnRegion(req, { allowFipsEndpoint: true });
+  },
+
+  /**
+   * Validates the resource type supplied in an S3 ARN
+   */
+  validateArnResourceType: function validateArnResourceType(req) {
+    var resource = req._parsedArn.resource;
+
+    if (
+      resource.indexOf('accesspoint:') !== 0 &&
+      resource.indexOf('accesspoint/') !== 0
+    ) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'ARN resource should begin with \'accesspoint/\''
+      });
+    }
+  },
+
+  /**
+   * @api private
+   */
+  validateBucketName: function validateBucketName(req) {
+    var service = req.service;
+    var signatureVersion = service.getSignatureVersion(req);
+    var bucket = req.params && req.params.Bucket;
+    var key = req.params && req.params.Key;
+    var slashIndex = bucket && bucket.indexOf('/');
+    if (bucket && slashIndex >= 0) {
+      if (typeof key === 'string' && slashIndex > 0) {
+        req.params = AWS.util.copy(req.params);
+        // Need to include trailing slash to match sigv2 behavior
+        var prefix = bucket.substr(slashIndex + 1) || '';
+        req.params.Key = prefix + '/' + key;
+        req.params.Bucket = bucket.substr(0, slashIndex);
+      } else if (signatureVersion === 'v4') {
+        var msg = 'Bucket names cannot contain forward slashes. Bucket: ' + bucket;
+        throw AWS.util.error(new Error(),
+          { code: 'InvalidBucket', message: msg });
+      }
+    }
+  },
+
+  /**
+   * @api private
+   */
+  isValidAccelerateOperation: function isValidAccelerateOperation(operation) {
+    var invalidOperations = [
+      'createBucket',
+      'deleteBucket',
+      'listBuckets'
+    ];
+    return invalidOperations.indexOf(operation) === -1;
+  },
+
+  /**
+   * When the us-east-1 regional endpoint configuration is set, instead of
+   * sending the request to the global endpoint (e.g. 's3.amazonaws.com'),
+   * we will send the request to 's3.us-east-1.amazonaws.com'.
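+   * The flag is resolved from the client config (s3UsEast1RegionalEndpoint),
+   * the AWS_S3_US_EAST_1_REGIONAL_ENDPOINT environment variable, or the
+   * s3_us_east_1_regional_endpoint shared config entry (see the
+   * resolveRegionalEndpointsFlag call below).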
+   * @api private
+   */
+  optInUsEast1RegionalEndpoint: function optInUsEast1RegionalEndpoint(req) {
+    var service = req.service;
+    var config = service.config;
+    config.s3UsEast1RegionalEndpoint = resolveRegionalEndpointsFlag(service._originalConfig, {
+      env: 'AWS_S3_US_EAST_1_REGIONAL_ENDPOINT',
+      sharedConfig: 's3_us_east_1_regional_endpoint',
+      clientConfig: 's3UsEast1RegionalEndpoint'
+    });
+    if (
+      !(service._originalConfig || {}).endpoint &&
+      req.httpRequest.region === 'us-east-1' &&
+      config.s3UsEast1RegionalEndpoint === 'regional' &&
+      req.httpRequest.endpoint.hostname.indexOf('s3.amazonaws.com') >= 0
+    ) {
+      var insertPoint = config.endpoint.indexOf('.amazonaws.com');
+      var regionalEndpoint = config.endpoint.substring(0, insertPoint) +
+        '.us-east-1' + config.endpoint.substring(insertPoint);
+      req.httpRequest.updateEndpoint(regionalEndpoint);
+    }
+  },
+
+  /**
+   * S3 prefers dns-compatible bucket names to be moved from the uri path
+   * to the hostname as a sub-domain. This is not possible, even for
+   * dns-compatible buckets, when using SSL and the bucket name contains a
+   * dot ('.'), because the SSL wildcard certificate is only 1 level deep.
+   *
+   * @api private
+   */
+  populateURI: function populateURI(req) {
+    var httpRequest = req.httpRequest;
+    var b = req.params.Bucket;
+    var service = req.service;
+    var endpoint = httpRequest.endpoint;
+    if (b) {
+      if (!service.pathStyleBucketName(b)) {
+        if (service.config.useAccelerateEndpoint && service.isValidAccelerateOperation(req.operation)) {
+          if (service.config.useDualstackEndpoint) {
+            endpoint.hostname = b + '.s3-accelerate.dualstack.amazonaws.com';
+          } else {
+            endpoint.hostname = b + '.s3-accelerate.amazonaws.com';
+          }
+        } else if (!service.config.s3BucketEndpoint) {
+          endpoint.hostname =
+            b + '.' + endpoint.hostname;
+        }
+
+        var port = endpoint.port;
+        if (port !== 80 && port !== 443) {
+          endpoint.host = endpoint.hostname + ':' +
+            endpoint.port;
+        } else {
+          endpoint.host = endpoint.hostname;
+        }
+
+        httpRequest.virtualHostedBucket = b; // needed for signing the request
+        service.removeVirtualHostedBucketFromPath(req);
+      }
+    }
+  },
+
+  /**
+   * Takes the bucket name out of the path if the bucket is virtual-hosted
+   *
+   * @api private
+   */
+  removeVirtualHostedBucketFromPath: function removeVirtualHostedBucketFromPath(req) {
+    var httpRequest = req.httpRequest;
+    var bucket = httpRequest.virtualHostedBucket;
+    if (bucket && httpRequest.path) {
+      if (req.params && req.params.Key) {
+        var encodedS3Key = '/' + AWS.util.uriEscapePath(req.params.Key);
+        if (httpRequest.path.indexOf(encodedS3Key) === 0 && (httpRequest.path.length === encodedS3Key.length || httpRequest.path[encodedS3Key.length] === '?')) {
+          //path only contains key or path contains only key and querystring
+          return;
+        }
+      }
+      httpRequest.path = httpRequest.path.replace(new RegExp('/' + bucket), '');
+      if (httpRequest.path[0] !== '/') {
+        httpRequest.path = '/' + httpRequest.path;
+      }
+    }
+  },
+
+  /**
+   * When a user supplies an access point ARN in the Bucket parameter, we
+   * need to populate the URI according to the ARN.
+   */
+  populateUriFromAccessPointArn: function populateUriFromAccessPointArn(req) {
+    var accessPointArn = req._parsedArn;
+
+    var isOutpostArn = accessPointArn.service === 's3-outposts';
+    var isObjectLambdaArn = accessPointArn.service === 's3-object-lambda';
+
+    var outpostsSuffix = isOutpostArn ? '.' + accessPointArn.outpostId: '';
+    var serviceName = isOutpostArn ?
's3-outposts': 's3-accesspoint'; + var fipsSuffix = !isOutpostArn && req.service.config.useFipsEndpoint ? '-fips': ''; + var dualStackSuffix = !isOutpostArn && + req.service.config.useDualstackEndpoint ? '.dualstack' : ''; + + var endpoint = req.httpRequest.endpoint; + var dnsSuffix = regionUtil.getEndpointSuffix(accessPointArn.region); + var useArnRegion = req.service.config.s3UseArnRegion; + + endpoint.hostname = [ + accessPointArn.accessPoint + '-' + accessPointArn.accountId + outpostsSuffix, + serviceName + fipsSuffix + dualStackSuffix, + useArnRegion ? accessPointArn.region : req.service.config.region, + dnsSuffix + ].join('.'); + + if (isObjectLambdaArn) { + // should be in the format: "accesspoint/${accesspointName}" + var serviceName = 's3-object-lambda'; + var accesspointName = accessPointArn.resource.split('/')[1]; + var fipsSuffix = req.service.config.useFipsEndpoint ? '-fips': ''; + endpoint.hostname = [ + accesspointName + '-' + accessPointArn.accountId, + serviceName + fipsSuffix, + useArnRegion ? accessPointArn.region : req.service.config.region, + dnsSuffix + ].join('.'); + } + endpoint.host = endpoint.hostname; + var encodedArn = AWS.util.uriEscape(req.params.Bucket); + var path = req.httpRequest.path; + //remove the Bucket value from path + req.httpRequest.path = path.replace(new RegExp('/' + encodedArn), ''); + if (req.httpRequest.path[0] !== '/') { + req.httpRequest.path = '/' + req.httpRequest.path; + } + req.httpRequest.region = accessPointArn.region; //region used to sign + }, + + /** + * Adds Expect: 100-continue header if payload is greater-or-equal 1MB + * @api private + */ + addExpect100Continue: function addExpect100Continue(req) { + var len = req.httpRequest.headers['Content-Length']; + if (AWS.util.isNode() && (len >= 1024 * 1024 || req.params.Body instanceof AWS.util.stream.Stream)) { + req.httpRequest.headers['Expect'] = '100-continue'; + } + }, + + /** + * Adds a default content type if none is supplied. + * + * @api private + */ + addContentType: function addContentType(req) { + var httpRequest = req.httpRequest; + if (httpRequest.method === 'GET' || httpRequest.method === 'HEAD') { + // Content-Type is not set in GET/HEAD requests + delete httpRequest.headers['Content-Type']; + return; + } + + if (!httpRequest.headers['Content-Type']) { // always have a Content-Type + httpRequest.headers['Content-Type'] = 'application/octet-stream'; + } + + var contentType = httpRequest.headers['Content-Type']; + if (AWS.util.isBrowser()) { + if (typeof httpRequest.body === 'string' && !contentType.match(/;\s*charset=/)) { + var charset = '; charset=UTF-8'; + httpRequest.headers['Content-Type'] += charset; + } else { + var replaceFn = function(_, prefix, charsetName) { + return prefix + charsetName.toUpperCase(); + }; + + httpRequest.headers['Content-Type'] = + contentType.replace(/(;\s*charset=)(.+)$/, replaceFn); + } + } + }, + + /** + * Checks whether checksums should be computed for the request if it's not + * already set by {AWS.EventListeners.Core.COMPUTE_CHECKSUM}. It depends on + * whether {AWS.Config.computeChecksums} is set. + * + * @param req [AWS.Request] the request to check against + * @return [Boolean] whether to compute checksums for a request. 
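+   * Note: this only covers the unsigned-payload (s3DisableBodySigning) and
+   * SigV2 presign cases; Content-MD5 for SigV4 requests is handled by
+   * AWS.EventListeners.Core.COMPUTE_CHECKSUM.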
+   * @api private
+   */
+  willComputeChecksums: function willComputeChecksums(req) {
+    var rules = req.service.api.operations[req.operation].input.members;
+    var body = req.httpRequest.body;
+    var needsContentMD5 = req.service.config.computeChecksums &&
+      rules.ContentMD5 &&
+      !req.params.ContentMD5 &&
+      body &&
+      (AWS.util.Buffer.isBuffer(req.httpRequest.body) || typeof req.httpRequest.body === 'string');
+
+    // Sha256 signing disabled, and not a presigned url
+    if (needsContentMD5 && req.service.shouldDisableBodySigning(req) && !req.isPresigned()) {
+      return true;
+    }
+
+    // SigV2 and presign, for backwards compatibility purposes.
+    if (needsContentMD5 && this.getSignatureVersion(req) === 's3' && req.isPresigned()) {
+      return true;
+    }
+
+    return false;
+  },
+
+  /**
+   * A listener that computes the Content-MD5 and sets it in the header.
+   * This listener is to support S3-specific features like
+   * s3DisableBodySigning and SigV2 presign. Content-MD5 logic for SigV4 is
+   * handled in AWS.EventListeners.Core.COMPUTE_CHECKSUM.
+   *
+   * @api private
+   */
+  computeContentMd5: function computeContentMd5(req) {
+    if (req.service.willComputeChecksums(req)) {
+      var md5 = AWS.util.crypto.md5(req.httpRequest.body, 'base64');
+      req.httpRequest.headers['Content-MD5'] = md5;
+    }
+  },
+
+  /**
+   * @api private
+   */
+  computeSseCustomerKeyMd5: function computeSseCustomerKeyMd5(req) {
+    var keys = {
+      SSECustomerKey: 'x-amz-server-side-encryption-customer-key-MD5',
+      CopySourceSSECustomerKey: 'x-amz-copy-source-server-side-encryption-customer-key-MD5'
+    };
+    AWS.util.each(keys, function(key, header) {
+      if (req.params[key]) {
+        var value = AWS.util.crypto.md5(req.params[key], 'base64');
+        req.httpRequest.headers[header] = value;
+      }
+    });
+  },
+
+  /**
+   * Returns true if the bucket name should be left in the URI path for
+   * a request to S3. This function takes into account the current
+   * endpoint protocol (e.g. http or https).
+   *
+   * @api private
+   */
+  pathStyleBucketName: function pathStyleBucketName(bucketName) {
+    // user can force path style requests via the configuration
+    if (this.config.s3ForcePathStyle) return true;
+    if (this.config.s3BucketEndpoint) return false;
+
+    if (s3util.dnsCompatibleBucketName(bucketName)) {
+      return (this.config.sslEnabled && bucketName.match(/\./)) ? true : false;
+    } else {
+      return true; // non-dns-compatible names must always use path style
+    }
+  },
+
+  /**
+   * For COPY operations, some responses can contain an error even with a
+   * 200 status code. The SDK treats the response as an exception when the
+   * response body indicates an exception or the body is empty.
+   *
+   * @api private
+   */
+  extractErrorFrom200Response: function extractErrorFrom200Response(resp) {
+    if (!operationsWith200StatusCodeError[resp.request.operation]) return;
+    var httpResponse = resp.httpResponse;
+    if (httpResponse.body && httpResponse.body.toString().match('<Error>')) {
+      // Response body with '<Error>...</Error>' indicates an exception.
+      // Get S3 client object. In ManagedUpload, this.service refers to
+      // S3 client object.
+      resp.data = null;
+      var service = this.service ? this.service : this;
+      service.extractError(resp);
+      throw resp.error;
+    } else if (!httpResponse.body || !httpResponse.body.toString().match(/<[\w_]/)) {
+      // When body is empty or incomplete, S3 might stop the request on detecting client
+      // side aborting the request.
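+      // Discard the partial payload and surface an InternalError rather than
+      // handing the truncated body back to the caller as a success.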
+ resp.data = null; + throw AWS.util.error(new Error(), { + code: 'InternalError', + message: 'S3 aborted request' + }); + } + }, + + /** + * @return [Boolean] whether the error can be retried + * @api private + */ + retryableError: function retryableError(error, request) { + if (operationsWith200StatusCodeError[request.operation] && + error.statusCode === 200) { + return true; + } else if (request._requestRegionForBucket && + request.service.bucketRegionCache[request._requestRegionForBucket]) { + return false; + } else if (error && error.code === 'RequestTimeout') { + return true; + } else if (error && + regionRedirectErrorCodes.indexOf(error.code) != -1 && + error.region && error.region != request.httpRequest.region) { + request.httpRequest.region = error.region; + if (error.statusCode === 301) { + request.service.updateReqBucketRegion(request); + } + return true; + } else { + var _super = AWS.Service.prototype.retryableError; + return _super.call(this, error, request); + } + }, + + /** + * Updates httpRequest with region. If region is not provided, then + * the httpRequest will be updated based on httpRequest.region + * + * @api private + */ + updateReqBucketRegion: function updateReqBucketRegion(request, region) { + var httpRequest = request.httpRequest; + if (typeof region === 'string' && region.length) { + httpRequest.region = region; + } + if (!httpRequest.endpoint.host.match(/s3(?!-accelerate).*\.amazonaws\.com$/)) { + return; + } + var service = request.service; + var s3Config = service.config; + var s3BucketEndpoint = s3Config.s3BucketEndpoint; + if (s3BucketEndpoint) { + delete s3Config.s3BucketEndpoint; + } + var newConfig = AWS.util.copy(s3Config); + delete newConfig.endpoint; + newConfig.region = httpRequest.region; + + httpRequest.endpoint = (new AWS.S3(newConfig)).endpoint; + service.populateURI(request); + s3Config.s3BucketEndpoint = s3BucketEndpoint; + httpRequest.headers.Host = httpRequest.endpoint.host; + + if (request._asm.currentState === 'validate') { + request.removeListener('build', service.populateURI); + request.addListener('build', service.removeVirtualHostedBucketFromPath); + } + }, + + /** + * Provides a specialized parser for getBucketLocation -- all other + * operations are parsed by the super class. + * + * @api private + */ + extractData: function extractData(resp) { + var req = resp.request; + if (req.operation === 'getBucketLocation') { + var match = resp.httpResponse.body.toString().match(/>(.+)<\/Location/); + delete resp.data['_']; + if (match) { + resp.data.LocationConstraint = match[1]; + } else { + resp.data.LocationConstraint = ''; + } + } + var bucket = req.params.Bucket || null; + if (req.operation === 'deleteBucket' && typeof bucket === 'string' && !resp.error) { + req.service.clearBucketRegionCache(bucket); + } else { + var headers = resp.httpResponse.headers || {}; + var region = headers['x-amz-bucket-region'] || null; + if (!region && req.operation === 'createBucket' && !resp.error) { + var createBucketConfiguration = req.params.CreateBucketConfiguration; + if (!createBucketConfiguration) { + region = 'us-east-1'; + } else if (createBucketConfiguration.LocationConstraint === 'EU') { + region = 'eu-west-1'; + } else { + region = createBucketConfiguration.LocationConstraint; + } + } + if (region) { + if (bucket && region !== req.service.bucketRegionCache[bucket]) { + req.service.bucketRegionCache[bucket] = region; + } + } + } + req.service.extractRequestIds(resp); + }, + + /** + * Extracts an error object from the http response. 
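+   * Also caches any region reported through the x-amz-bucket-region header
+   * so that a region-redirect error can be retried against the bucket's
+   * actual region.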
+ * + * @api private + */ + extractError: function extractError(resp) { + var codes = { + 304: 'NotModified', + 403: 'Forbidden', + 400: 'BadRequest', + 404: 'NotFound' + }; + + var req = resp.request; + var code = resp.httpResponse.statusCode; + var body = resp.httpResponse.body || ''; + + var headers = resp.httpResponse.headers || {}; + var region = headers['x-amz-bucket-region'] || null; + var bucket = req.params.Bucket || null; + var bucketRegionCache = req.service.bucketRegionCache; + if (region && bucket && region !== bucketRegionCache[bucket]) { + bucketRegionCache[bucket] = region; + } + + var cachedRegion; + if (codes[code] && body.length === 0) { + if (bucket && !region) { + cachedRegion = bucketRegionCache[bucket] || null; + if (cachedRegion !== req.httpRequest.region) { + region = cachedRegion; + } + } + resp.error = AWS.util.error(new Error(), { + code: codes[code], + message: null, + region: region + }); + } else { + var data = new AWS.XML.Parser().parse(body.toString()); + + if (data.Region && !region) { + region = data.Region; + if (bucket && region !== bucketRegionCache[bucket]) { + bucketRegionCache[bucket] = region; + } + } else if (bucket && !region && !data.Region) { + cachedRegion = bucketRegionCache[bucket] || null; + if (cachedRegion !== req.httpRequest.region) { + region = cachedRegion; + } + } + + resp.error = AWS.util.error(new Error(), { + code: data.Code || code, + message: data.Message || null, + region: region + }); + } + req.service.extractRequestIds(resp); + }, + + /** + * If region was not obtained synchronously, then send async request + * to get bucket region for errors resulting from wrong region. + * + * @api private + */ + requestBucketRegion: function requestBucketRegion(resp, done) { + var error = resp.error; + var req = resp.request; + var bucket = req.params.Bucket || null; + + if (!error || !bucket || error.region || req.operation === 'listObjects' || + (AWS.util.isNode() && req.operation === 'headBucket') || + (error.statusCode === 400 && req.operation !== 'headObject') || + regionRedirectErrorCodes.indexOf(error.code) === -1) { + return done(); + } + var reqOperation = AWS.util.isNode() ? 'headBucket' : 'listObjects'; + var reqParams = {Bucket: bucket}; + if (reqOperation === 'listObjects') reqParams.MaxKeys = 0; + var regionReq = req.service[reqOperation](reqParams); + regionReq._requestRegionForBucket = bucket; + regionReq.send(function() { + var region = req.service.bucketRegionCache[bucket] || null; + error.region = region; + done(); + }); + }, + + /** + * For browser only. If NetworkingError received, will attempt to obtain + * the bucket region. 
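+   * Checks the region cache first, assumes us-east-1 for bucket names that
+   * are not DNS compatible, and probes virtual-hosted buckets with a
+   * zero-key listObjects request.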
+ * + * @api private + */ + reqRegionForNetworkingError: function reqRegionForNetworkingError(resp, done) { + if (!AWS.util.isBrowser()) { + return done(); + } + var error = resp.error; + var request = resp.request; + var bucket = request.params.Bucket; + if (!error || error.code !== 'NetworkingError' || !bucket || + request.httpRequest.region === 'us-east-1') { + return done(); + } + var service = request.service; + var bucketRegionCache = service.bucketRegionCache; + var cachedRegion = bucketRegionCache[bucket] || null; + + if (cachedRegion && cachedRegion !== request.httpRequest.region) { + service.updateReqBucketRegion(request, cachedRegion); + done(); + } else if (!s3util.dnsCompatibleBucketName(bucket)) { + service.updateReqBucketRegion(request, 'us-east-1'); + if (bucketRegionCache[bucket] !== 'us-east-1') { + bucketRegionCache[bucket] = 'us-east-1'; + } + done(); + } else if (request.httpRequest.virtualHostedBucket) { + var getRegionReq = service.listObjects({Bucket: bucket, MaxKeys: 0}); + service.updateReqBucketRegion(getRegionReq, 'us-east-1'); + getRegionReq._requestRegionForBucket = bucket; + + getRegionReq.send(function() { + var region = service.bucketRegionCache[bucket] || null; + if (region && region !== request.httpRequest.region) { + service.updateReqBucketRegion(request, region); + } + done(); + }); + } else { + // DNS-compatible path-style + // (s3ForcePathStyle or bucket name with dot over https) + // Cannot obtain region information for this case + done(); + } + }, + + /** + * Cache for bucket region. + * + * @api private + */ + bucketRegionCache: {}, + + /** + * Clears bucket region cache. + * + * @api private + */ + clearBucketRegionCache: function(buckets) { + var bucketRegionCache = this.bucketRegionCache; + if (!buckets) { + buckets = Object.keys(bucketRegionCache); + } else if (typeof buckets === 'string') { + buckets = [buckets]; + } + for (var i = 0; i < buckets.length; i++) { + delete bucketRegionCache[buckets[i]]; + } + return bucketRegionCache; + }, + + /** + * Corrects request region if bucket's cached region is different + * + * @api private + */ + correctBucketRegionFromCache: function correctBucketRegionFromCache(req) { + var bucket = req.params.Bucket || null; + if (bucket) { + var service = req.service; + var requestRegion = req.httpRequest.region; + var cachedRegion = service.bucketRegionCache[bucket]; + if (cachedRegion && cachedRegion !== requestRegion) { + service.updateReqBucketRegion(req, cachedRegion); + } + } + }, + + /** + * Extracts S3 specific request ids from the http response. + * + * @api private + */ + extractRequestIds: function extractRequestIds(resp) { + var extendedRequestId = resp.httpResponse.headers ? resp.httpResponse.headers['x-amz-id-2'] : null; + var cfId = resp.httpResponse.headers ? resp.httpResponse.headers['x-amz-cf-id'] : null; + resp.extendedRequestId = extendedRequestId; + resp.cfId = cfId; + + if (resp.error) { + resp.error.requestId = resp.requestId || null; + resp.error.extendedRequestId = extendedRequestId; + resp.error.cfId = cfId; + } + }, + + /** + * Get a pre-signed URL for a given operation name. + * + * @note You must ensure that you have static or previously resolved + * credentials if you call this method synchronously (with no callback), + * otherwise it may not properly sign the request. If you cannot guarantee + * this (you are using an asynchronous credential provider, i.e., EC2 + * IAM roles), you should always call this method with an asynchronous + * callback. 
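+   * @note A promise-returning variant, getSignedUrlPromise, is attached
+   *   near the bottom of this module via AWS.util.addPromises.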
+   * @note Not all operation parameters are supported when using pre-signed
+   * URLs. Certain parameters, such as `SSECustomerKey`, `ACL`, `Expires`,
+   * `ContentLength`, or `Tagging` must be provided as headers when sending a
+   * request. If you are using pre-signed URLs to upload from a browser and
+   * need to use these fields, see {createPresignedPost}.
+   * @note The default signer allows altering the request by adding corresponding
+   * headers to set some parameters (e.g. Range) and these added parameters
+   * won't be signed. You must use signatureVersion v4 to include these
+   * parameters in the signed portion of the URL and enforce exact matching
+   * between headers and signed params in the URL.
+   * @note This operation cannot be used with a promise. See note above regarding
+   * asynchronous credentials and use with a callback.
+   * @param operation [String] the name of the operation to call
+   * @param params [map] parameters to pass to the operation. See the given
+   * operation for the expected operation parameters. In addition, you can
+   * also pass the "Expires" parameter to inform S3 how long the URL should
+   * work for.
+   * @option params Expires [Integer] (900) the number of seconds to expire
+   * the pre-signed URL operation in. Defaults to 15 minutes.
+   * @param callback [Function] if a callback is provided, this function will
+   * pass the URL as the second parameter (after the error parameter) to
+   * the callback function.
+   * @return [String] if called synchronously (with no callback), returns the
+   * signed URL.
+   * @return [null] nothing is returned if a callback is provided.
+   * @example Pre-signing a getObject operation (synchronously)
+   * var params = {Bucket: 'bucket', Key: 'key'};
+   * var url = s3.getSignedUrl('getObject', params);
+   * console.log('The URL is', url);
+   * @example Pre-signing a putObject (asynchronously)
+   * var params = {Bucket: 'bucket', Key: 'key'};
+   * s3.getSignedUrl('putObject', params, function (err, url) {
+   * console.log('The URL is', url);
+   * });
+   * @example Pre-signing a putObject operation with a specific payload
+   * var params = {Bucket: 'bucket', Key: 'key', Body: 'body'};
+   * var url = s3.getSignedUrl('putObject', params);
+   * console.log('The URL is', url);
+   * @example Passing in a 1-minute expiry time for a pre-signed URL
+   * var params = {Bucket: 'bucket', Key: 'key', Expires: 60};
+   * var url = s3.getSignedUrl('getObject', params);
+   * console.log('The URL is', url); // expires in 60 seconds
+   */
+  getSignedUrl: function getSignedUrl(operation, params, callback) {
+    params = AWS.util.copy(params || {});
+    var expires = params.Expires || 900;
+
+    if (typeof expires !== 'number') {
+      throw AWS.util.error(new Error(),
+        { code: 'InvalidParameterException', message: 'The expiration must be a number, received ' + typeof expires });
+    }
+
+    delete params.Expires; // we can't validate this
+    var request = this.makeRequest(operation, params);
+
+    if (callback) {
+      AWS.util.defer(function() {
+        request.presign(expires, callback);
+      });
+    } else {
+      return request.presign(expires, callback);
+    }
+  },
+
+  /**
+   * @!method getSignedUrlPromise()
+   * Returns a 'thenable' promise that will be resolved with a pre-signed URL
+   * for a given operation name.
+   *
+   * Two callbacks can be provided to the `then` method on the returned promise.
+   * The first callback will be called if the promise is fulfilled, and the second
+   * callback will be called if the promise is rejected.
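+   * Because this variant always takes the asynchronous callback path
+   * internally, it is safe to use with credential providers that resolve
+   * asynchronously.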
+   * @note Not all operation parameters are supported when using pre-signed
+   * URLs. Certain parameters, such as `SSECustomerKey`, `ACL`, `Expires`,
+   * `ContentLength`, or `Tagging` must be provided as headers when sending a
+   * request. If you are using pre-signed URLs to upload from a browser and
+   * need to use these fields, see {createPresignedPost}.
+   * @param operation [String] the name of the operation to call
+   * @param params [map] parameters to pass to the operation. See the given
+   * operation for the expected operation parameters. In addition, you can
+   * also pass the "Expires" parameter to inform S3 how long the URL should
+   * work for.
+   * @option params Expires [Integer] (900) the number of seconds to expire
+   * the pre-signed URL operation in. Defaults to 15 minutes.
+   * @callback fulfilledCallback function(url)
+   * Called if the promise is fulfilled.
+   * @param url [String] the signed url
+   * @callback rejectedCallback function(err)
+   * Called if the promise is rejected.
+   * @param err [Error] if an error occurred, this value will be filled
+   * @return [Promise] A promise that represents the state of the
+   * `getSignedUrlPromise` call.
+   * @example Pre-signing a getObject operation
+   * var params = {Bucket: 'bucket', Key: 'key'};
+   * var promise = s3.getSignedUrlPromise('getObject', params);
+   * promise.then(function(url) {
+   * console.log('The URL is', url);
+   * }, function(err) { ... });
+   * @example Pre-signing a putObject operation with a specific payload
+   * var params = {Bucket: 'bucket', Key: 'key', Body: 'body'};
+   * var promise = s3.getSignedUrlPromise('putObject', params);
+   * promise.then(function(url) {
+   * console.log('The URL is', url);
+   * }, function(err) { ... });
+   * @example Passing in a 1-minute expiry time for a pre-signed URL
+   * var params = {Bucket: 'bucket', Key: 'key', Expires: 60};
+   * var promise = s3.getSignedUrlPromise('getObject', params);
+   * promise.then(function(url) {
+   * console.log('The URL is', url);
+   * }, function(err) { ... });
+   */
+
+  /**
+   * Get a pre-signed POST policy to support uploading to S3 directly from an
+   * HTML form.
+   *
+   * @param params [map]
+   * @option params Bucket [String] The bucket to which the post should be
+   * uploaded
+   * @option params Expires [Integer] (3600) The number of seconds for which
+   * the presigned policy should be valid.
+   * @option params Conditions [Array] An array of conditions that must be met
+   * for the presigned policy to allow the
+   * upload. This can include required tags,
+   * the accepted range for content lengths,
+   * etc.
+   * @see http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTConstructPolicy.html
+   * @option params Fields [map] Fields to include in the form. All
+   * values passed in as fields will be
+   * signed as exact match conditions.
+   * @param callback [Function]
+   *
+   * @note All fields passed in when creating presigned post data will be signed
+   * as exact match conditions. Any fields that will be interpolated by S3
+   * must be added to the fields hash after signing, and an appropriate
+   * condition for such fields must be explicitly added to the Conditions
+   * array passed to this function before signing.
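+   * For example, a ['content-length-range', 0, 10485760] entry in Conditions
+   * limits uploads to at most 10 MB.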
+   *
+   * @example Presigning post data with a known key
+   * var params = {
+   * Bucket: 'bucket',
+   * Fields: {
+   * key: 'key'
+   * }
+   * };
+   * s3.createPresignedPost(params, function(err, data) {
+   * if (err) {
+   * console.error('Presigning post data encountered an error', err);
+   * } else {
+   * console.log('The post data is', data);
+   * }
+   * });
+   *
+   * @example Presigning post data with an interpolated key
+   * var params = {
+   * Bucket: 'bucket',
+   * Conditions: [
+   * ['starts-with', '$key', 'path/to/uploads/']
+   * ]
+   * };
+   * s3.createPresignedPost(params, function(err, data) {
+   * if (err) {
+   * console.error('Presigning post data encountered an error', err);
+   * } else {
+   * data.Fields.key = 'path/to/uploads/${filename}';
+   * console.log('The post data is', data);
+   * }
+   * });
+   *
+   * @note You must ensure that you have static or previously resolved
+   * credentials if you call this method synchronously (with no callback),
+   * otherwise it may not properly sign the request. If you cannot guarantee
+   * this (you are using an asynchronous credential provider, i.e., EC2
+   * IAM roles), you should always call this method with an asynchronous
+   * callback.
+   *
+   * @return [map] If called synchronously (with no callback), returns a hash
+   * with the url to set as the form action and a hash of fields
+   * to include in the form.
+   * @return [null] Nothing is returned if a callback is provided.
+   *
+   * @callback callback function (err, data)
+   * @param err [Error] the error object returned from the policy signer
+   * @param data [map] The data necessary to construct an HTML form
+   * @param data.url [String] The URL to use as the action of the form
+   * @param data.fields [map] A hash of fields that must be included in the
+   * form for the upload to succeed. This hash will
+   * include the signed POST policy, your access key
+   * ID and security token (if present), etc. These
+   * may be safely included as input elements of type
+   * 'hidden.'
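+   * @note data.fields.Policy carries the base64-encoded POST policy and
+   * data.fields['X-Amz-Signature'] its SigV4 HMAC; both are produced by
+   * preparePostFields below.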
+ */ + createPresignedPost: function createPresignedPost(params, callback) { + if (typeof params === 'function' && callback === undefined) { + callback = params; + params = null; + } + + params = AWS.util.copy(params || {}); + var boundParams = this.config.params || {}; + var bucket = params.Bucket || boundParams.Bucket, + self = this, + config = this.config, + endpoint = AWS.util.copy(this.endpoint); + if (!config.s3BucketEndpoint) { + endpoint.pathname = '/' + bucket; + } + + function finalizePost() { + return { + url: AWS.util.urlFormat(endpoint), + fields: self.preparePostFields( + config.credentials, + config.region, + bucket, + params.Fields, + params.Conditions, + params.Expires + ) + }; + } + + if (callback) { + config.getCredentials(function (err) { + if (err) { + callback(err); + } else { + try { + callback(null, finalizePost()); + } catch (err) { + callback(err); + } + } + }); + } else { + return finalizePost(); + } + }, + + /** + * @api private + */ + preparePostFields: function preparePostFields( + credentials, + region, + bucket, + fields, + conditions, + expiresInSeconds + ) { + var now = this.getSkewCorrectedDate(); + if (!credentials || !region || !bucket) { + throw new Error('Unable to create a POST object policy without a bucket,' + + ' region, and credentials'); + } + fields = AWS.util.copy(fields || {}); + conditions = (conditions || []).slice(0); + expiresInSeconds = expiresInSeconds || 3600; + + var signingDate = AWS.util.date.iso8601(now).replace(/[:\-]|\.\d{3}/g, ''); + var shortDate = signingDate.substr(0, 8); + var scope = v4Credentials.createScope(shortDate, region, 's3'); + var credential = credentials.accessKeyId + '/' + scope; + + fields['bucket'] = bucket; + fields['X-Amz-Algorithm'] = 'AWS4-HMAC-SHA256'; + fields['X-Amz-Credential'] = credential; + fields['X-Amz-Date'] = signingDate; + if (credentials.sessionToken) { + fields['X-Amz-Security-Token'] = credentials.sessionToken; + } + for (var field in fields) { + if (fields.hasOwnProperty(field)) { + var condition = {}; + condition[field] = fields[field]; + conditions.push(condition); + } + } + + fields.Policy = this.preparePostPolicy( + new Date(now.valueOf() + expiresInSeconds * 1000), + conditions + ); + fields['X-Amz-Signature'] = AWS.util.crypto.hmac( + v4Credentials.getSigningKey(credentials, shortDate, region, 's3', true), + fields.Policy, + 'hex' + ); + + return fields; + }, + + /** + * @api private + */ + preparePostPolicy: function preparePostPolicy(expiration, conditions) { + return AWS.util.base64.encode(JSON.stringify({ + expiration: AWS.util.date.iso8601(expiration), + conditions: conditions + })); + }, + + /** + * @api private + */ + prepareSignedUrl: function prepareSignedUrl(request) { + request.addListener('validate', request.service.noPresignedContentLength); + request.removeListener('build', request.service.addContentType); + if (!request.params.Body) { + // no Content-MD5/SHA-256 if body is not provided + request.removeListener('build', request.service.computeContentMd5); + } else { + request.addListener('afterBuild', AWS.EventListeners.Core.COMPUTE_SHA256); + } + }, + + /** + * @api private + * @param request + */ + disableBodySigning: function disableBodySigning(request) { + var headers = request.httpRequest.headers; + // Add the header to anything that isn't a presigned url, unless that presigned url had a body defined + if (!Object.prototype.hasOwnProperty.call(headers, 'presigned-expires')) { + headers['X-Amz-Content-Sha256'] = 'UNSIGNED-PAYLOAD'; + } + }, + + /** + * @api private + 
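+   * ContentLength cannot be expressed in a pre-signed URL, so presign
+   * requests that set it are rejected with an UnexpectedParameter error.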
*/
+  noPresignedContentLength: function noPresignedContentLength(request) {
+    if (request.params.ContentLength !== undefined) {
+      throw AWS.util.error(new Error(), {code: 'UnexpectedParameter',
+        message: 'ContentLength is not supported in pre-signed URLs.'});
+    }
+  },
+
+  createBucket: function createBucket(params, callback) {
+    // When creating a bucket *outside* the classic region, the location
+    // constraint must be set for the bucket and it must match the endpoint.
+    // This chunk of code will set the location constraint param based
+    // on the region (when possible), but it will not override a passed-in
+    // location constraint.
+    if (typeof params === 'function' || !params) {
+      callback = callback || params;
+      params = {};
+    }
+    var hostname = this.endpoint.hostname;
+    // copy params so that appending keys does not unintentionally
+    // mutate the params object argument passed in by the user
+    var copiedParams = AWS.util.copy(params);
+
+    if (hostname !== this.api.globalEndpoint && !params.CreateBucketConfiguration) {
+      copiedParams.CreateBucketConfiguration = { LocationConstraint: this.config.region };
+    }
+    return this.makeRequest('createBucket', copiedParams, callback);
+  },
+
+  writeGetObjectResponse: function writeGetObjectResponse(params, callback) {
+
+    var request = this.makeRequest('writeGetObjectResponse', AWS.util.copy(params), callback);
+    var hostname = this.endpoint.hostname;
+    if (hostname.indexOf(this.config.region) !== -1) {
+      // hostname specifies a region already
+      hostname = hostname.replace('s3.', OBJECT_LAMBDA_SERVICE + '.');
+    } else {
+      // Hostname doesn't have a region.
+      // Object Lambda requires an explicit region.
+      hostname = hostname.replace('s3.', OBJECT_LAMBDA_SERVICE + '.' + this.config.region + '.');
+    }
+
+    request.httpRequest.endpoint = new AWS.Endpoint(hostname, this.config);
+    return request;
+  },
+
+  /**
+   * @see AWS.S3.ManagedUpload
+   * @overload upload(params = {}, [options], [callback])
+   * Uploads an arbitrarily sized buffer, blob, or stream, using intelligent
+   * concurrent handling of parts if the payload is large enough. You can
+   * configure the concurrent queue size by setting `options`. Note that this
+   * is the only operation for which the SDK can retry requests with stream
+   * bodies.
+   *
+   * @param (see AWS.S3.putObject)
+   * @option (see AWS.S3.ManagedUpload.constructor)
+   * @return [AWS.S3.ManagedUpload] the managed upload object that can call
+   * `send()` or track progress.
+   * @example Uploading a stream object
+   * var params = {Bucket: 'bucket', Key: 'key', Body: stream};
+   * s3.upload(params, function(err, data) {
+   * console.log(err, data);
+   * });
+   * @example Uploading a stream with concurrency of 1 and partSize of 10MB
+   * var params = {Bucket: 'bucket', Key: 'key', Body: stream};
+   * var options = {partSize: 10 * 1024 * 1024, queueSize: 1};
+   * s3.upload(params, options, function(err, data) {
+   * console.log(err, data);
+   * });
+   * @callback callback function(err, data)
+   * @param err [Error] an error or null if no error occurred.
+ * @param data [map] The response data from the successful upload: + * @param data.Location [String] the URL of the uploaded object + * @param data.ETag [String] the ETag of the uploaded object + * @param data.Bucket [String] the bucket to which the object was uploaded + * @param data.Key [String] the key to which the object was uploaded + */ + upload: function upload(params, options, callback) { + if (typeof options === 'function' && callback === undefined) { + callback = options; + options = null; + } + + options = options || {}; + options = AWS.util.merge(options || {}, {service: this, params: params}); + + var uploader = new AWS.S3.ManagedUpload(options); + if (typeof callback === 'function') uploader.send(callback); + return uploader; + } +}); + +/** + * @api private + */ +AWS.S3.addPromisesToClass = function addPromisesToClass(PromiseDependency) { + this.prototype.getSignedUrlPromise = AWS.util.promisifyMethod('getSignedUrl', PromiseDependency); +}; + +/** + * @api private + */ +AWS.S3.deletePromisesFromClass = function deletePromisesFromClass() { + delete this.prototype.getSignedUrlPromise; +}; + +AWS.util.addPromises(AWS.S3); + + +/***/ }), + +/***/ 71207: +/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => { + +var AWS = __nccwpck_require__(28437); +var s3util = __nccwpck_require__(35895); +var regionUtil = __nccwpck_require__(18262); + +AWS.util.update(AWS.S3Control.prototype, { + /** + * @api private + */ + setupRequestListeners: function setupRequestListeners(request) { + request.addListener('extractError', this.extractHostId); + request.addListener('extractData', this.extractHostId); + request.addListener('validate', this.validateAccountId); + + var isArnInBucket = s3util.isArnInParam(request, 'Bucket'); + var isArnInName = s3util.isArnInParam(request, 'Name'); + + if (isArnInBucket) { + request._parsedArn = AWS.util.ARN.parse(request.params['Bucket']); + request.addListener('validate', this.validateOutpostsBucketArn); + request.addListener('validate', s3util.validateOutpostsArn); + request.addListener('afterBuild', this.addOutpostIdHeader); + } else if (isArnInName) { + request._parsedArn = AWS.util.ARN.parse(request.params['Name']); + request.addListener('validate', s3util.validateOutpostsAccessPointArn); + request.addListener('validate', s3util.validateOutpostsArn); + request.addListener('afterBuild', this.addOutpostIdHeader); + } + + if (isArnInBucket || isArnInName) { + request.addListener('validate', this.validateArnRegion); + request.addListener('validate', this.validateArnAccountWithParams, true); + request.addListener('validate', s3util.validateArnAccount); + request.addListener('validate', s3util.validateArnService); + request.addListener('build', this.populateParamFromArn, true); + request.addListener('build', this.populateUriFromArn); + request.addListener('build', s3util.validatePopulateUriFromArn); + } + + if (request.params.OutpostId && + (request.operation === 'createBucket' || + request.operation === 'listRegionalBuckets')) { + request.addListener('build', this.populateEndpointForOutpostId); + } + }, + + /** + * Adds outpostId header + */ + addOutpostIdHeader: function addOutpostIdHeader(req) { + req.httpRequest.headers['x-amz-outpost-id'] = req._parsedArn.outpostId; + }, + + /** + * Validate Outposts ARN supplied in Bucket parameter is a valid bucket name + */ + validateOutpostsBucketArn: function validateOutpostsBucketArn(req) { + var parsedArn = req._parsedArn; + + //can be ':' or '/' + var delimiter = 
parsedArn.resource['outpost'.length];
+
+    if (parsedArn.resource.split(delimiter).length !== 4) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'Bucket ARN should have two resources outpost/{outpostId}/bucket/{bucketName}'
+      });
+    }
+
+    var bucket = parsedArn.resource.split(delimiter)[3];
+    if (!s3util.dnsCompatibleBucketName(bucket) || bucket.match(/\./)) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'Bucket ARN is not DNS compatible. Got ' + bucket
+      });
+    }
+
+    //set parsed valid bucket
+    req._parsedArn.bucket = bucket;
+  },
+
+  /**
+   * @api private
+   */
+  populateParamFromArn: function populateParamFromArn(req) {
+    var parsedArn = req._parsedArn;
+    if (s3util.isArnInParam(req, 'Bucket')) {
+      req.params.Bucket = parsedArn.bucket;
+    } else if (s3util.isArnInParam(req, 'Name')) {
+      req.params.Name = parsedArn.accessPoint;
+    }
+  },
+
+  /**
+   * Populates the URI according to the ARN
+   */
+  populateUriFromArn: function populateUriFromArn(req) {
+    var parsedArn = req._parsedArn;
+
+    var endpoint = req.httpRequest.endpoint;
+    var useArnRegion = req.service.config.s3UseArnRegion;
+    var useFipsEndpoint = req.service.config.useFipsEndpoint;
+
+    endpoint.hostname = [
+      's3-outposts' + (useFipsEndpoint ? '-fips': ''),
+      useArnRegion ? parsedArn.region : req.service.config.region,
+      'amazonaws.com'
+    ].join('.');
+    endpoint.host = endpoint.hostname;
+  },
+
+  /**
+   * @api private
+   */
+  populateEndpointForOutpostId: function populateEndpointForOutpostId(req) {
+    var endpoint = req.httpRequest.endpoint;
+    var useFipsEndpoint = req.service.config.useFipsEndpoint;
+    endpoint.hostname = [
+      's3-outposts' + (useFipsEndpoint ? '-fips': ''),
+      req.service.config.region,
+      'amazonaws.com'
+    ].join('.');
+    endpoint.host = endpoint.hostname;
+  },
+
+  /**
+   * @api private
+   */
+  extractHostId: function(response) {
+    var hostId = response.httpResponse.headers ?
response.httpResponse.headers['x-amz-id-2'] : null;
+    response.extendedRequestId = hostId;
+    if (response.error) {
+      response.error.extendedRequestId = hostId;
+    }
+  },
+
+  /**
+   * @api private
+   */
+  validateArnRegion: function validateArnRegion(req) {
+    s3util.validateArnRegion(req, { allowFipsEndpoint: true });
+  },
+
+  /**
+   * @api private
+   */
+  validateArnAccountWithParams: function validateArnAccountWithParams(req) {
+    var params = req.params;
+    var inputModel = req.service.api.operations[req.operation].input;
+    if (inputModel.members.AccountId) {
+      var parsedArn = req._parsedArn;
+      if (parsedArn.accountId) {
+        if (params.AccountId) {
+          if (params.AccountId !== parsedArn.accountId) {
+            throw AWS.util.error(
+              new Error(),
+              {code: 'ValidationError', message: 'AccountId in ARN and request params should be the same.'}
+            );
+          }
+        } else {
+          // Store accountId from ARN in params
+          params.AccountId = parsedArn.accountId;
+        }
+      }
+    }
+  },
+
+  /**
+   * @api private
+   */
+  validateAccountId: function(request) {
+    var params = request.params;
+    if (!Object.prototype.hasOwnProperty.call(params, 'AccountId')) return;
+    var accountId = params.AccountId;
+    //validate type
+    if (typeof accountId !== 'string') {
+      throw AWS.util.error(
+        new Error(),
+        {code: 'ValidationError', message: 'AccountId must be a string.'}
+      );
+    }
+    //validate length
+    if (accountId.length < 1 || accountId.length > 63) {
+      throw AWS.util.error(
+        new Error(),
+        {code: 'ValidationError', message: 'AccountId length should be between 1 and 63 characters, inclusive.'}
+      );
+    }
+    //validate pattern
+    var hostPattern = /^[a-zA-Z0-9]{1}$|^[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9]$/;
+    if (!hostPattern.test(accountId)) {
+      throw AWS.util.error(new Error(),
+        {code: 'ValidationError', message: 'AccountId should be hostname compatible. 
+          AccountId: ' + accountId});
+    }
+  },
+
+  /**
+   * @api private
+   */
+  getSigningName: function getSigningName(req) {
+    var _super = AWS.Service.prototype.getSigningName;
+    if (req && req._parsedArn && req._parsedArn.service) {
+      return req._parsedArn.service;
+    } else if (req.params.OutpostId &&
+      (req.operation === 'createBucket' ||
+      req.operation === 'listRegionalBuckets')) {
+      return 's3-outposts';
+    } else {
+      return _super.call(this, req);
+    }
+  },
+});
+
+
+/***/ }),
+
+/***/ 35895:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var regionUtil = __nccwpck_require__(18262);
+
+var s3util = {
+  /**
+   * @api private
+   */
+  isArnInParam: function isArnInParam(req, paramName) {
+    var inputShape = (req.service.api.operations[req.operation] || {}).input || {};
+    var inputMembers = inputShape.members || {};
+    if (!req.params[paramName] || !inputMembers[paramName]) return false;
+    return AWS.util.ARN.validate(req.params[paramName]);
+  },
+
+  /**
+   * Validate service component from ARN supplied in Bucket parameter
+   */
+  validateArnService: function validateArnService(req) {
+    var parsedArn = req._parsedArn;
+
+    if (parsedArn.service !== 's3'
+      && parsedArn.service !== 's3-outposts'
+      && parsedArn.service !== 's3-object-lambda') {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'expect \'s3\' or \'s3-outposts\' or \'s3-object-lambda\' in ARN service component'
+      });
+    }
+  },
+
+  /**
+   * Validate account ID from ARN supplied in Bucket parameter is a valid account
+   */
+  validateArnAccount: function validateArnAccount(req) {
+    var parsedArn = req._parsedArn;
+
+    if (!/[0-9]{12}/.exec(parsedArn.accountId)) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'ARN accountID does not match regex "[0-9]{12}"'
+      });
+    }
+  },
+
+  /**
+   * Validate ARN supplied in Bucket parameter is a valid access point ARN
+   */
+  validateS3AccessPointArn: function validateS3AccessPointArn(req) {
+    var parsedArn = req._parsedArn;
+
+    //can be ':' or '/'
+    var delimiter = parsedArn.resource['accesspoint'.length];
+
+    if (parsedArn.resource.split(delimiter).length !== 2) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'Access Point ARN should have one resource accesspoint/{accesspointName}'
+      });
+    }
+
+    var accessPoint = parsedArn.resource.split(delimiter)[1];
+    var accessPointPrefix = accessPoint + '-' + parsedArn.accountId;
+    if (!s3util.dnsCompatibleBucketName(accessPointPrefix) || accessPointPrefix.match(/\./)) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'Access point resource in ARN is not DNS compatible. Got ' + accessPoint
+      });
+    }
+
+    //set parsed valid access point
+    req._parsedArn.accessPoint = accessPoint;
+  },
+
+  /**
+   * Validate Outposts ARN supplied in Bucket parameter is a valid outposts ARN
+   */
+  validateOutpostsArn: function validateOutpostsArn(req) {
+    var parsedArn = req._parsedArn;
+
+    if (
+      parsedArn.resource.indexOf('outpost:') !== 0 &&
+      parsedArn.resource.indexOf('outpost/') !== 0
+    ) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'ARN resource should begin with \'outpost/\''
+      });
+    }
+
+    //can be ':' or '/'
+    var delimiter = parsedArn.resource['outpost'.length];
+    var outpostId = parsedArn.resource.split(delimiter)[1];
+    var dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/);
+    if (!dnsHostRegex.test(outpostId)) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'Outpost resource in ARN is not DNS compatible. Got ' + outpostId
+      });
+    }
+    req._parsedArn.outpostId = outpostId;
+  },
+
+  /**
+   * Validate Outposts ARN supplied in Bucket parameter is a valid outposts ARN
+   */
+  validateOutpostsAccessPointArn: function validateOutpostsAccessPointArn(req) {
+    var parsedArn = req._parsedArn;
+
+    //can be ':' or '/'
+    var delimiter = parsedArn.resource['outpost'.length];
+
+    if (parsedArn.resource.split(delimiter).length !== 4) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'Outposts ARN should have two resources outpost/{outpostId}/accesspoint/{accesspointName}'
+      });
+    }
+
+    var accessPoint = parsedArn.resource.split(delimiter)[3];
+    var accessPointPrefix = accessPoint + '-' + parsedArn.accountId;
+    if (!s3util.dnsCompatibleBucketName(accessPointPrefix) || accessPointPrefix.match(/\./)) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: 'Access point resource in ARN is not DNS compatible. Got ' + accessPoint
+      });
+    }
+
+    //set parsed valid access point
+    req._parsedArn.accessPoint = accessPoint;
+  },
+
+  /**
+   * Validate region field in ARN supplied in Bucket parameter is a valid region
+   */
+  validateArnRegion: function validateArnRegion(req, options) {
+    if (options === undefined) {
+      options = {};
+    }
+
+    var useArnRegion = s3util.loadUseArnRegionConfig(req);
+    var regionFromArn = req._parsedArn.region;
+    var clientRegion = req.service.config.region;
+    var useFipsEndpoint = req.service.config.useFipsEndpoint;
+    var allowFipsEndpoint = options.allowFipsEndpoint || false;
+
+    if (!regionFromArn) {
+      var message = 'ARN region is empty';
+      if (req._parsedArn.service === 's3') {
+        message = message + '\nYou may want to use multi-regional ARN. The feature is not supported in current SDK. ' +
+          'You should consider switching to V3(https://github.com/aws/aws-sdk-js-v3).';
+      }
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidARN',
+        message: message
+      });
+    }
+
+    if (useFipsEndpoint && !allowFipsEndpoint) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidConfiguration',
+        message: 'ARN endpoint is not compatible with FIPS region'
+      });
+    }
+
+    if (regionFromArn.indexOf('fips') >= 0) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidConfiguration',
+        message: 'FIPS region not allowed in ARN'
+      });
+    }
+
+    if (!useArnRegion && regionFromArn !== clientRegion) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidConfiguration',
+        message: 'Configured region conflicts with access point region'
+      });
+    } else if (
+      useArnRegion &&
+      regionUtil.getEndpointSuffix(regionFromArn) !== regionUtil.getEndpointSuffix(clientRegion)
+    ) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidConfiguration',
+        message: 'Configured region and access point region not in same partition'
+      });
+    }
+
+    if (req.service.config.useAccelerateEndpoint) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidConfiguration',
+        message: 'useAccelerateEndpoint config is not supported with access point ARN'
+      });
+    }
+
+    if (req._parsedArn.service === 's3-outposts' && req.service.config.useDualstackEndpoint) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidConfiguration',
+        message: 'Dualstack is not supported with outposts access point ARN'
+      });
+    }
+  },
+
+  loadUseArnRegionConfig: function loadUseArnRegionConfig(req) {
+    var envName = 'AWS_S3_USE_ARN_REGION';
+    var configName = 's3_use_arn_region';
+    var useArnRegion = true;
+    var originalConfig = req.service._originalConfig || {};
+    if (req.service.config.s3UseArnRegion !== undefined) {
+      return req.service.config.s3UseArnRegion;
+    } else if (originalConfig.s3UseArnRegion !== undefined) {
+      useArnRegion = originalConfig.s3UseArnRegion === true;
+    } else if (AWS.util.isNode()) {
+      //load from environmental variable AWS_USE_ARN_REGION
+      if (process.env[envName]) {
+        var value = process.env[envName].trim().toLowerCase();
+        if (['false', 'true'].indexOf(value) < 0) {
+          throw AWS.util.error(new Error(), {
+            code: 'InvalidConfiguration',
+            message: envName + ' only accepts true or false. Got ' + process.env[envName],
+            retryable: false
+          });
+        }
+        useArnRegion = value === 'true';
+      } else { //load from shared config property use_arn_region
+        var profiles = {};
+        var profile = {};
+        try {
+          profiles = AWS.util.getProfilesFromSharedConfig(AWS.util.iniLoader);
+          profile = profiles[process.env.AWS_PROFILE || AWS.util.defaultProfile];
+        } catch (e) {}
+        if (profile[configName]) {
+          if (['false', 'true'].indexOf(profile[configName].trim().toLowerCase()) < 0) {
+            throw AWS.util.error(new Error(), {
+              code: 'InvalidConfiguration',
+              message: configName + ' only accepts true or false. Got ' + profile[configName],
+              retryable: false
+            });
+          }
+          useArnRegion = profile[configName].trim().toLowerCase() === 'true';
+        }
+      }
+    }
+    req.service.config.s3UseArnRegion = useArnRegion;
+    return useArnRegion;
+  },
+
+  /**
+   * Validations before URI can be populated
+   */
+  validatePopulateUriFromArn: function validatePopulateUriFromArn(req) {
+    if (req.service._originalConfig && req.service._originalConfig.endpoint) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidConfiguration',
+        message: 'Custom endpoint is not compatible with access point ARN'
+      });
+    }
+
+    if (req.service.config.s3ForcePathStyle) {
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidConfiguration',
+        message: 'Cannot construct path-style endpoint with access point'
+      });
+    }
+  },
+
+  /**
+   * Returns true if the bucket name is DNS compatible. Buckets created
+   * outside of the classic region MUST be DNS compatible.
+   *
+   * @api private
+   */
+  dnsCompatibleBucketName: function dnsCompatibleBucketName(bucketName) {
+    var b = bucketName;
+    var domain = new RegExp(/^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/);
+    var ipAddress = new RegExp(/(\d+\.){3}\d+/);
+    var dots = new RegExp(/\.\./);
+    return (b.match(domain) && !b.match(ipAddress) && !b.match(dots)) ? true : false;
+  },
+};
+
+/**
+ * @api private
+ */
+module.exports = s3util;
+
+
+/***/ }),
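The validators above only see a pre-parsed ARN, so it helps to look at the raw shape they expect. A minimal sketch (assuming aws-sdk v2's `AWS.util.ARN.parse`, which splits on `:` the same way these validators consume the result; the ARN itself is hypothetical):

```javascript
var AWS = require('aws-sdk');

// validateS3AccessPointArn() accepts exactly one "accesspoint/{name}" resource pair.
var parsed = AWS.util.ARN.parse('arn:aws:s3:us-west-2:123456789012:accesspoint/myendpoint');
// parsed.service   === 's3'
// parsed.accountId === '123456789012'  (must match /[0-9]{12}/)
// parsed.resource  === 'accesspoint/myendpoint'
```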
+
+/***/ 94571:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+
+AWS.util.update(AWS.SQS.prototype, {
+  /**
+   * @api private
+   */
+  setupRequestListeners: function setupRequestListeners(request) {
+    request.addListener('build', this.buildEndpoint);
+
+    if (request.service.config.computeChecksums) {
+      if (request.operation === 'sendMessage') {
+        request.addListener('extractData', this.verifySendMessageChecksum);
+      } else if (request.operation === 'sendMessageBatch') {
+        request.addListener('extractData', this.verifySendMessageBatchChecksum);
+      } else if (request.operation === 'receiveMessage') {
+        request.addListener('extractData', this.verifyReceiveMessageChecksum);
+      }
+    }
+  },
+
+  /**
+   * @api private
+   */
+  verifySendMessageChecksum: function verifySendMessageChecksum(response) {
+    if (!response.data) return;
+
+    var md5 = response.data.MD5OfMessageBody;
+    var body = this.params.MessageBody;
+    var calculatedMd5 = this.service.calculateChecksum(body);
+    if (calculatedMd5 !== md5) {
+      var msg = 'Got "' + response.data.MD5OfMessageBody +
+        '", expecting "' + calculatedMd5 + '".';
+      this.service.throwInvalidChecksumError(response,
+        [response.data.MessageId], msg);
+    }
+  },
+
+  /**
+   * @api private
+   */
+  verifySendMessageBatchChecksum: function verifySendMessageBatchChecksum(response) {
+    if (!response.data) return;
+
+    var service = this.service;
+    var entries = {};
+    var errors = [];
+    var messageIds = [];
+    AWS.util.arrayEach(response.data.Successful, function (entry) {
+      entries[entry.Id] = entry;
+    });
+    AWS.util.arrayEach(this.params.Entries, function (entry) {
+      if (entries[entry.Id]) {
+        var md5 = entries[entry.Id].MD5OfMessageBody;
+        var body = entry.MessageBody;
+        if (!service.isChecksumValid(md5, body)) {
+          errors.push(entry.Id);
+          messageIds.push(entries[entry.Id].MessageId);
+        }
+      }
+    });
+
+    if (errors.length > 0) {
+      service.throwInvalidChecksumError(response, messageIds,
+        'Invalid messages: ' + errors.join(', '));
+    }
+  },
+
+  /**
+   * @api private
+   */
+  verifyReceiveMessageChecksum: function verifyReceiveMessageChecksum(response) {
+    if (!response.data) return;
+
+    var service = this.service;
+    var messageIds = [];
+    AWS.util.arrayEach(response.data.Messages, function(message) {
+      var md5 = message.MD5OfBody;
+      var body = message.Body;
+      if (!service.isChecksumValid(md5, body)) {
+        messageIds.push(message.MessageId);
+      }
+    });
+
+    if (messageIds.length > 0) {
+      service.throwInvalidChecksumError(response, messageIds,
+        'Invalid messages: ' + messageIds.join(', '));
+    }
+  },
+
+  /**
+   * @api private
+   */
+  throwInvalidChecksumError: function throwInvalidChecksumError(response, ids, message) {
+    response.error = AWS.util.error(new Error(), {
+      retryable: true,
+      code: 'InvalidChecksum',
+      messageIds: ids,
+      message: response.request.operation +
+        ' returned an invalid MD5 response. ' + message
+    });
+  },
+
+  /**
+   * @api private
+   */
+  isChecksumValid: function isChecksumValid(checksum, data) {
+    return this.calculateChecksum(data) === checksum;
+  },
+
+  /**
+   * @api private
+   */
+  calculateChecksum: function calculateChecksum(data) {
+    return AWS.util.crypto.md5(data, 'hex');
+  },
+
+  /**
+   * @api private
+   */
+  buildEndpoint: function buildEndpoint(request) {
+    var url = request.httpRequest.params.QueueUrl;
+    if (url) {
+      request.httpRequest.endpoint = new AWS.Endpoint(url);
+
+      // signature version 4 requires the region name to be set,
+      // sqs queue urls contain the region name
+      var matches = request.httpRequest.endpoint.host.match(/^sqs\.(.+?)\./);
+      if (matches) request.httpRequest.region = matches[1];
+    }
+  }
+});
+
+
+/***/ }),
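The checksum helpers above boil down to a hex MD5 over the raw message body. A standalone sketch of the same comparison `verifySendMessageChecksum` performs, using only Node's `crypto`:

```javascript
var crypto = require('crypto');

// Equivalent of calculateChecksum(): hex MD5 of the UTF-8 body.
function md5Hex(body) {
  return crypto.createHash('md5').update(body, 'utf8').digest('hex');
}

// A response is flagged when md5Hex(sentBody) !== response.data.MD5OfMessageBody.
console.log(md5Hex('hello world')); // 5eb63bbbe01eeed093cb22bb8f5acdc3
```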
+
+/***/ 91055:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var resolveRegionalEndpointsFlag = __nccwpck_require__(85566);
+var ENV_REGIONAL_ENDPOINT_ENABLED = 'AWS_STS_REGIONAL_ENDPOINTS';
+var CONFIG_REGIONAL_ENDPOINT_ENABLED = 'sts_regional_endpoints';
+
+AWS.util.update(AWS.STS.prototype, {
+  /**
+   * @overload credentialsFrom(data, credentials = null)
+   *   Creates a credentials object from STS response data containing
+   *   credentials information. Useful for quickly setting AWS credentials.
+   *
+   *   @note This is a low-level utility function. If you want to load temporary
+   *     credentials into your process for subsequent requests to AWS resources,
+   *     you should use {AWS.TemporaryCredentials} instead.
+   *   @param data [map] data retrieved from a call to {getFederatedToken},
+   *     {getSessionToken}, {assumeRole}, or {assumeRoleWithWebIdentity}.
+   *   @param credentials [AWS.Credentials] an optional credentials object to
+   *     fill instead of creating a new object. Useful when modifying an
+   *     existing credentials object from a refresh call.
+   *   @return [AWS.TemporaryCredentials] the set of temporary credentials
+   *     loaded from a raw STS operation response.
+   *   @example Using credentialsFrom to load global AWS credentials
+   *     var sts = new AWS.STS();
+   *     sts.getSessionToken(function (err, data) {
+   *       if (err) console.log("Error getting credentials");
+   *       else {
+   *         AWS.config.credentials = sts.credentialsFrom(data);
+   *       }
+   *     });
+   *   @see AWS.TemporaryCredentials
+   */
+  credentialsFrom: function credentialsFrom(data, credentials) {
+    if (!data) return null;
+    if (!credentials) credentials = new AWS.TemporaryCredentials();
+    credentials.expired = false;
+    credentials.accessKeyId = data.Credentials.AccessKeyId;
+    credentials.secretAccessKey = data.Credentials.SecretAccessKey;
+    credentials.sessionToken = data.Credentials.SessionToken;
+    credentials.expireTime = data.Credentials.Expiration;
+    return credentials;
+  },
+
+  assumeRoleWithWebIdentity: function assumeRoleWithWebIdentity(params, callback) {
+    return this.makeUnauthenticatedRequest('assumeRoleWithWebIdentity', params, callback);
+  },
+
+  assumeRoleWithSAML: function assumeRoleWithSAML(params, callback) {
+    return this.makeUnauthenticatedRequest('assumeRoleWithSAML', params, callback);
+  },
+
+  /**
+   * @api private
+   */
+  setupRequestListeners: function setupRequestListeners(request) {
+    request.addListener('validate', this.optInRegionalEndpoint, true);
+  },
+
+  /**
+   * @api private
+   */
+  optInRegionalEndpoint: function optInRegionalEndpoint(req) {
+    var service = req.service;
+    var config = service.config;
+    config.stsRegionalEndpoints = resolveRegionalEndpointsFlag(service._originalConfig, {
+      env: ENV_REGIONAL_ENDPOINT_ENABLED,
+      sharedConfig: CONFIG_REGIONAL_ENDPOINT_ENABLED,
+      clientConfig: 'stsRegionalEndpoints'
+    });
+    if (
+      config.stsRegionalEndpoints === 'regional' &&
+      service.isGlobalEndpoint
+    ) {
+      //client will throw if region is not supplied; request will be signed with specified region
+      if (!config.region) {
+        throw AWS.util.error(new Error(),
+          {code: 'ConfigError', message: 'Missing region in config'});
+      }
+      var insertPoint = config.endpoint.indexOf('.amazonaws.com');
+      var regionalEndpoint = config.endpoint.substring(0, insertPoint) +
+        '.' + config.region + config.endpoint.substring(insertPoint);
+      req.httpRequest.updateEndpoint(regionalEndpoint);
+      req.httpRequest.region = config.region;
+    }
+  }
+
+});
+
+
+/***/ }),
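The endpoint rewrite in `optInRegionalEndpoint` is a plain string splice. A sketch with a hypothetical region, showing the transformation in isolation:

```javascript
// Mirrors the insertPoint logic above: inject '.{region}' before '.amazonaws.com'.
var endpoint = 'sts.amazonaws.com';
var region = 'us-west-2'; // hypothetical client region

var insertPoint = endpoint.indexOf('.amazonaws.com');
var regionalEndpoint = endpoint.substring(0, insertPoint) +
  '.' + region + endpoint.substring(insertPoint);
// regionalEndpoint === 'sts.us-west-2.amazonaws.com'
```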
+
+/***/ 31987:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+
+AWS.util.hideProperties(AWS, ['SimpleWorkflow']);
+
+/**
+ * @constant
+ * @readonly
+ * Backwards compatibility for access to the {AWS.SWF} service class.
+ */
+AWS.SimpleWorkflow = AWS.SWF;
+
+
+/***/ }),
+
+/***/ 29697:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var IniLoader = (__nccwpck_require__(95417).IniLoader);
+/**
+ * Singleton object to load specified config/credentials files.
+ * It will cache all the files ever loaded;
+ */
+module.exports.b = new IniLoader();
+
+
+/***/ }),
+
+/***/ 95417:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var os = __nccwpck_require__(22037);
+var path = __nccwpck_require__(71017);
+
+function parseFile(filename) {
+  return AWS.util.ini.parse(AWS.util.readFileSync(filename));
+}
+
+function getProfiles(fileContent) {
+  var tmpContent = {};
+  Object.keys(fileContent).forEach(function(sectionName) {
+    if (/^sso-session\s/.test(sectionName)) return;
+    Object.defineProperty(tmpContent, sectionName.replace(/^profile\s/, ''), {
+      value: fileContent[sectionName],
+      enumerable: true
+    });
+  });
+  return tmpContent;
+}
+
+function getSsoSessions(fileContent) {
+  var tmpContent = {};
+  Object.keys(fileContent).forEach(function(sectionName) {
+    if (!/^sso-session\s/.test(sectionName)) return;
+    Object.defineProperty(tmpContent, sectionName.replace(/^sso-session\s/, ''), {
+      value: fileContent[sectionName],
+      enumerable: true
+    });
+  });
+  return tmpContent;
+}
+
+/**
+ * Ini file loader class the same as that used in the SDK. It loads and
+ * parses config and credentials files in .ini format and cache the content
+ * to assure files are only read once.
+ * Note that calling operations on the instance instantiated from this class
+ * won't affect the behavior of SDK since SDK uses an internal singleton of
+ * this class.
+ * @!macro nobrowser
+ */
+AWS.IniLoader = AWS.util.inherit({
+  constructor: function IniLoader() {
+    this.resolvedProfiles = {};
+    this.resolvedSsoSessions = {};
+  },
+
+  /** Remove all cached files. Used after config files are updated. */
+  clearCachedFiles: function clearCachedFiles() {
+    this.resolvedProfiles = {};
+    this.resolvedSsoSessions = {};
+  },
+
+  /**
+   * Load configurations from config/credentials files and cache them
+   * for later use. If no file is specified it will try to load default files.
+   *
+   * @param options [map] information describing the file
+   * @option options filename [String] ('~/.aws/credentials' or defined by
+   *   AWS_SHARED_CREDENTIALS_FILE process env var or '~/.aws/config' if
+   *   isConfig is set to true)
+   *   path to the file to be read.
+   * @option options isConfig [Boolean] (false) True to read config file.
+   * @return [map] object containing contents from file in key-value
+   *   pairs.
+   */
+  loadFrom: function loadFrom(options) {
+    options = options || {};
+    var isConfig = options.isConfig === true;
+    var filename = options.filename || this.getDefaultFilePath(isConfig);
+    if (!this.resolvedProfiles[filename]) {
+      var fileContent = parseFile(filename);
+      if (isConfig) {
+        Object.defineProperty(this.resolvedProfiles, filename, {
+          value: getProfiles(fileContent)
+        });
+      } else {
+        Object.defineProperty(this.resolvedProfiles, filename, { value: fileContent });
+      }
+    }
+    return this.resolvedProfiles[filename];
+  },
+
+  /**
+   * Load sso sessions from config/credentials files and cache them
+   * for later use. If no file is specified it will try to load default file.
+   *
+   * @param options [map] information describing the file
+   * @option options filename [String] ('~/.aws/config' or defined by
+   *   AWS_CONFIG_FILE process env var)
+   * @return [map] object containing contents from file in key-value
+   *   pairs.
+   */
+  loadSsoSessionsFrom: function loadSsoSessionsFrom(options) {
+    options = options || {};
+    var filename = options.filename || this.getDefaultFilePath(true);
+    if (!this.resolvedSsoSessions[filename]) {
+      var fileContent = parseFile(filename);
+      Object.defineProperty(this.resolvedSsoSessions, filename, {
+        value: getSsoSessions(fileContent)
+      });
+    }
+    return this.resolvedSsoSessions[filename];
+  },
+
+  /**
+   * @api private
+   */
+  getDefaultFilePath: function getDefaultFilePath(isConfig) {
+    return path.join(
+      this.getHomeDir(),
+      '.aws',
+      isConfig ? 'config' : 'credentials'
+    );
+  },
+
+  /**
+   * @api private
+   */
+  getHomeDir: function getHomeDir() {
+    var env = process.env;
+    var home = env.HOME ||
+      env.USERPROFILE ||
+      (env.HOMEPATH ? ((env.HOMEDRIVE || 'C:/') + env.HOMEPATH) : null);
+
+    if (home) {
+      return home;
+    }
+
+    if (typeof os.homedir === 'function') {
+      return os.homedir();
+    }
+
+    throw AWS.util.error(
+      new Error('Cannot load credentials, HOME path not set')
+    );
+  }
+});
+
+var IniLoader = AWS.IniLoader;
+
+module.exports = {
+  IniLoader: IniLoader
+};
+
+
+/***/ }),
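Because the loader caches by filename, repeated `loadFrom` calls are cheap. A minimal usage sketch (assuming a standard `~/.aws/config` and aws-sdk v2's shared `AWS.util.iniLoader` singleton; `getProfiles()` above strips the `profile ` section prefix):

```javascript
var AWS = require('aws-sdk');

// Reads and caches ~/.aws/config; subsequent calls return the cached map.
var profiles = AWS.util.iniLoader.loadFrom({ isConfig: true });
console.log(Object.keys(profiles)); // e.g. ['default', 'dev']

// After editing config files on disk, drop the cache explicitly:
AWS.util.iniLoader.clearCachedFiles();
```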
+
+/***/ 98382:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+
+/**
+ * @api private
+ */
+AWS.Signers.Bearer = AWS.util.inherit(AWS.Signers.RequestSigner, {
+  constructor: function Bearer(request) {
+    AWS.Signers.RequestSigner.call(this, request);
+  },
+
+  addAuthorization: function addAuthorization(token) {
+    this.request.headers['Authorization'] = 'Bearer ' + token.token;
+  }
+});
+
+
+/***/ }),
+
+/***/ 60328:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var inherit = AWS.util.inherit;
+
+/**
+ * @api private
+ */
+var expiresHeader = 'presigned-expires';
+
+/**
+ * @api private
+ */
+function signedUrlBuilder(request) {
+  var expires = request.httpRequest.headers[expiresHeader];
+  var signerClass = request.service.getSignerClass(request);
+
+  delete request.httpRequest.headers['User-Agent'];
+  delete request.httpRequest.headers['X-Amz-User-Agent'];
+
+  if (signerClass === AWS.Signers.V4) {
+    if (expires > 604800) { // one week expiry is invalid
+      var message = 'Presigning does not support expiry time greater ' +
+        'than a week with SigV4 signing.';
+      throw AWS.util.error(new Error(), {
+        code: 'InvalidExpiryTime', message: message, retryable: false
+      });
+    }
+    request.httpRequest.headers[expiresHeader] = expires;
+  } else if (signerClass === AWS.Signers.S3) {
+    var now = request.service ? request.service.getSkewCorrectedDate() : AWS.util.date.getDate();
+    request.httpRequest.headers[expiresHeader] = parseInt(
+      AWS.util.date.unixTimestamp(now) + expires, 10).toString();
+  } else {
+    throw AWS.util.error(new Error(), {
+      message: 'Presigning only supports S3 or SigV4 signing.',
+      code: 'UnsupportedSigner', retryable: false
+    });
+  }
+}
+
+/**
+ * @api private
+ */
+function signedUrlSigner(request) {
+  var endpoint = request.httpRequest.endpoint;
+  var parsedUrl = AWS.util.urlParse(request.httpRequest.path);
+  var queryParams = {};
+
+  if (parsedUrl.search) {
+    queryParams = AWS.util.queryStringParse(parsedUrl.search.substr(1));
+  }
+
+  var auth = request.httpRequest.headers['Authorization'].split(' ');
+  if (auth[0] === 'AWS') {
+    auth = auth[1].split(':');
+    queryParams['Signature'] = auth.pop();
+    queryParams['AWSAccessKeyId'] = auth.join(':');
+
+    AWS.util.each(request.httpRequest.headers, function (key, value) {
+      if (key === expiresHeader) key = 'Expires';
+      if (key.indexOf('x-amz-meta-') === 0) {
+        // Delete existing, potentially not normalized key
+        delete queryParams[key];
+        key = key.toLowerCase();
+      }
+      queryParams[key] = value;
+    });
+    delete request.httpRequest.headers[expiresHeader];
+    delete queryParams['Authorization'];
+    delete queryParams['Host'];
+  } else if (auth[0] === 'AWS4-HMAC-SHA256') { // SigV4 signing
+    auth.shift();
+    var rest = auth.join(' ');
+    var signature = rest.match(/Signature=(.*?)(?:,|\s|\r?\n|$)/)[1];
+    queryParams['X-Amz-Signature'] = signature;
+    delete queryParams['Expires'];
+  }
+
+  // build URL
+  endpoint.pathname = parsedUrl.pathname;
+  endpoint.search = AWS.util.queryParamsToString(queryParams);
+}
+
+/**
+ * @api private
+ */
+AWS.Signers.Presign = inherit({
+  /**
+   * @api private
+   */
+  sign: function sign(request, expireTime, callback) {
+    request.httpRequest.headers[expiresHeader] = expireTime || 3600;
+    request.on('build', signedUrlBuilder);
+    request.on('sign', signedUrlSigner);
+    request.removeListener('afterBuild',
+      AWS.EventListeners.Core.SET_CONTENT_LENGTH);
+    request.removeListener('afterBuild',
+      AWS.EventListeners.Core.COMPUTE_SHA256);
+
+    request.emit('beforePresign', [request]);
+
+    if (callback) {
+      request.build(function() {
+        if (this.response.error) callback(this.response.error);
+        else {
+          callback(null, AWS.util.urlFormat(request.httpRequest.endpoint));
+        }
+      });
+    } else {
+      request.build();
+      if (request.response.error) throw request.response.error;
+      return AWS.util.urlFormat(request.httpRequest.endpoint);
+    }
+  }
+});
+
+/**
+ * @api private
+ */
+module.exports = AWS.Signers.Presign;
+
+
+/***/ }),
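These listeners back the public `getSignedUrl` API in aws-sdk v2. A usage sketch with hypothetical bucket and key names; note the one-week SigV4 cap enforced by `signedUrlBuilder` above:

```javascript
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

var url = s3.getSignedUrl('getObject', {
  Bucket: 'my-bucket', // hypothetical
  Key: 'my-key',       // hypothetical
  Expires: 900         // seconds; SigV4 rejects values above 604800
});
```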
+
+/***/ 9897:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+
+var inherit = AWS.util.inherit;
+
+/**
+ * @api private
+ */
+AWS.Signers.RequestSigner = inherit({
+  constructor: function RequestSigner(request) {
+    this.request = request;
+  },
+
+  setServiceClientId: function setServiceClientId(id) {
+    this.serviceClientId = id;
+  },
+
+  getServiceClientId: function getServiceClientId() {
+    return this.serviceClientId;
+  }
+});
+
+AWS.Signers.RequestSigner.getVersion = function getVersion(version) {
+  switch (version) {
+    case 'v2': return AWS.Signers.V2;
+    case 'v3': return AWS.Signers.V3;
+    case 's3v4': return AWS.Signers.V4;
+    case 'v4': return AWS.Signers.V4;
+    case 's3': return AWS.Signers.S3;
+    case 'v3https': return AWS.Signers.V3Https;
+    case 'bearer': return AWS.Signers.Bearer;
+  }
+  throw new Error('Unknown signing version ' + version);
+};
+
+__nccwpck_require__(28489);
+__nccwpck_require__(66458);
+__nccwpck_require__(24473);
+__nccwpck_require__(26529);
+__nccwpck_require__(58616);
+__nccwpck_require__(60328);
+__nccwpck_require__(98382);
+
+
+/***/ }),
+
+/***/ 58616:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var inherit = AWS.util.inherit;
+
+/**
+ * @api private
+ */
+AWS.Signers.S3 = inherit(AWS.Signers.RequestSigner, {
+  /**
+   * When building the stringToSign, these sub resource params should be
+   * part of the canonical resource string with their NON-decoded values
+   */
+  subResources: {
+    'acl': 1,
+    'accelerate': 1,
+    'analytics': 1,
+    'cors': 1,
+    'lifecycle': 1,
+    'delete': 1,
+    'inventory': 1,
+    'location': 1,
+    'logging': 1,
+    'metrics': 1,
+    'notification': 1,
+    'partNumber': 1,
+    'policy': 1,
+    'requestPayment': 1,
+    'replication': 1,
+    'restore': 1,
+    'tagging': 1,
+    'torrent': 1,
+    'uploadId': 1,
+    'uploads': 1,
+    'versionId': 1,
+    'versioning': 1,
+    'versions': 1,
+    'website': 1
+  },
+
+  // when building the stringToSign, these querystring params should be
+  // part of the canonical resource string with their NON-encoded values
+  responseHeaders: {
+    'response-content-type': 1,
+    'response-content-language': 1,
+    'response-expires': 1,
+    'response-cache-control': 1,
+    'response-content-disposition': 1,
+    'response-content-encoding': 1
+  },
+
+  addAuthorization: function addAuthorization(credentials, date) {
+    if (!this.request.headers['presigned-expires']) {
+      this.request.headers['X-Amz-Date'] = AWS.util.date.rfc822(date);
+    }
+
+    if (credentials.sessionToken) {
+      // presigned URLs require this header to be lowercased
+      this.request.headers['x-amz-security-token'] = credentials.sessionToken;
+    }
+
+    var signature = this.sign(credentials.secretAccessKey, this.stringToSign());
+    var auth = 'AWS ' + credentials.accessKeyId + ':' + signature;
+
+    this.request.headers['Authorization'] = auth;
+  },
+
+  stringToSign: function stringToSign() {
+    var r = this.request;
+
+    var parts = [];
+    parts.push(r.method);
+    parts.push(r.headers['Content-MD5'] || '');
+    parts.push(r.headers['Content-Type'] || '');
+
+    // This is the "Date" header, but we use X-Amz-Date.
+    // The S3 signing mechanism requires us to pass an empty
+    // string for this Date header regardless.
+    parts.push(r.headers['presigned-expires'] || '');
+
+    var headers = this.canonicalizedAmzHeaders();
+    if (headers) parts.push(headers);
+    parts.push(this.canonicalizedResource());
+
+    return parts.join('\n');
+
+  },
+
+  canonicalizedAmzHeaders: function canonicalizedAmzHeaders() {
+
+    var amzHeaders = [];
+
+    AWS.util.each(this.request.headers, function (name) {
+      if (name.match(/^x-amz-/i))
+        amzHeaders.push(name);
+    });
+
+    amzHeaders.sort(function (a, b) {
+      return a.toLowerCase() < b.toLowerCase() ? -1 : 1;
+    });
+
+    var parts = [];
+    AWS.util.arrayEach.call(this, amzHeaders, function (name) {
+      parts.push(name.toLowerCase() + ':' + String(this.request.headers[name]));
+    });
+
+    return parts.join('\n');
+
+  },
+
+  canonicalizedResource: function canonicalizedResource() {
+
+    var r = this.request;
+
+    var parts = r.path.split('?');
+    var path = parts[0];
+    var querystring = parts[1];
+
+    var resource = '';
+
+    if (r.virtualHostedBucket)
+      resource += '/' + r.virtualHostedBucket;
+
+    resource += path;
+
+    if (querystring) {
+
+      // collect a list of sub resources and query params that need to be signed
+      var resources = [];
+
+      AWS.util.arrayEach.call(this, querystring.split('&'), function (param) {
+        var name = param.split('=')[0];
+        var value = param.split('=')[1];
+        if (this.subResources[name] || this.responseHeaders[name]) {
+          var subresource = { name: name };
+          if (value !== undefined) {
+            if (this.subResources[name]) {
+              subresource.value = value;
+            } else {
+              subresource.value = decodeURIComponent(value);
+            }
+          }
+          resources.push(subresource);
+        }
+      });
+
+      resources.sort(function (a, b) { return a.name < b.name ? -1 : 1; });
+
+      if (resources.length) {
+
+        querystring = [];
+        AWS.util.arrayEach(resources, function (res) {
+          if (res.value === undefined) {
+            querystring.push(res.name);
+          } else {
+            querystring.push(res.name + '=' + res.value);
+          }
+        });
+
+        resource += '?' + querystring.join('&');
+      }
+
+    }
+
+    return resource;
+
+  },
+
+  sign: function sign(secret, string) {
+    return AWS.util.crypto.hmac(secret, string, 'base64', 'sha1');
+  }
+});
+
+/**
+ * @api private
+ */
+module.exports = AWS.Signers.S3;
+
+
+/***/ }),
+
+/***/ 28489:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var inherit = AWS.util.inherit;
+
+/**
+ * @api private
+ */
+AWS.Signers.V2 = inherit(AWS.Signers.RequestSigner, {
+  addAuthorization: function addAuthorization(credentials, date) {
+
+    if (!date) date = AWS.util.date.getDate();
+
+    var r = this.request;
+
+    r.params.Timestamp = AWS.util.date.iso8601(date);
+    r.params.SignatureVersion = '2';
+    r.params.SignatureMethod = 'HmacSHA256';
+    r.params.AWSAccessKeyId = credentials.accessKeyId;
+
+    if (credentials.sessionToken) {
+      r.params.SecurityToken = credentials.sessionToken;
+    }
+
+    delete r.params.Signature; // delete old Signature for re-signing
+    r.params.Signature = this.signature(credentials);
+
+    r.body = AWS.util.queryParamsToString(r.params);
+    r.headers['Content-Length'] = r.body.length;
+  },
+
+  signature: function signature(credentials) {
+    return AWS.util.crypto.hmac(credentials.secretAccessKey, this.stringToSign(), 'base64');
+  },
+
+  stringToSign: function stringToSign() {
+    var parts = [];
+    parts.push(this.request.method);
+    parts.push(this.request.endpoint.host.toLowerCase());
+    parts.push(this.request.pathname());
+    parts.push(AWS.util.queryParamsToString(this.request.params));
+    return parts.join('\n');
+  }
+
+});
+
+/**
+ * @api private
+ */
+module.exports = AWS.Signers.V2;
+
+
+/***/ }),
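For reference, the string the S3 signer assembles for a simple GET follows `stringToSign()` above: method, MD5, type, date slot, then canonicalized headers and resource. A sketch with hypothetical request values:

```javascript
// Hypothetical request: GET /photos/puppy.jpg on bucket 'examplebucket'
// with no Content-MD5/Content-Type and one x-amz-* header.
var stringToSign = [
  'GET',
  '',                                // Content-MD5
  '',                                // Content-Type
  'Tue, 27 Mar 2007 19:36:42 +0000', // Date (or the presigned-expires slot)
  'x-amz-security-token:TOKEN',      // canonicalizedAmzHeaders()
  '/examplebucket/photos/puppy.jpg'  // canonicalizedResource()
].join('\n');
// sign() then HMAC-SHA1s this string with the secret key, base64-encoded.
```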
+
+/***/ 66458:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var inherit = AWS.util.inherit;
+
+/**
+ * @api private
+ */
+AWS.Signers.V3 = inherit(AWS.Signers.RequestSigner, {
+  addAuthorization: function addAuthorization(credentials, date) {
+
+    var datetime = AWS.util.date.rfc822(date);
+
+    this.request.headers['X-Amz-Date'] = datetime;
+
+    if (credentials.sessionToken) {
+      this.request.headers['x-amz-security-token'] = credentials.sessionToken;
+    }
+
+    this.request.headers['X-Amzn-Authorization'] =
+      this.authorization(credentials, datetime);
+
+  },
+
+  authorization: function authorization(credentials) {
+    return 'AWS3 ' +
+      'AWSAccessKeyId=' + credentials.accessKeyId + ',' +
+      'Algorithm=HmacSHA256,' +
+      'SignedHeaders=' + this.signedHeaders() + ',' +
+      'Signature=' + this.signature(credentials);
+  },
+
+  signedHeaders: function signedHeaders() {
+    var headers = [];
+    AWS.util.arrayEach(this.headersToSign(), function iterator(h) {
+      headers.push(h.toLowerCase());
+    });
+    return headers.sort().join(';');
+  },
+
+  canonicalHeaders: function canonicalHeaders() {
+    var headers = this.request.headers;
+    var parts = [];
+    AWS.util.arrayEach(this.headersToSign(), function iterator(h) {
+      parts.push(h.toLowerCase().trim() + ':' + String(headers[h]).trim());
+    });
+    return parts.sort().join('\n') + '\n';
+  },
+
+  headersToSign: function headersToSign() {
+    var headers = [];
+    AWS.util.each(this.request.headers, function iterator(k) {
+      if (k === 'Host' || k === 'Content-Encoding' || k.match(/^X-Amz/i)) {
+        headers.push(k);
+      }
+    });
+    return headers;
+  },
+
+  signature: function signature(credentials) {
+    return AWS.util.crypto.hmac(credentials.secretAccessKey, this.stringToSign(), 'base64');
+  },
+
+  stringToSign: function stringToSign() {
+    var parts = [];
+    parts.push(this.request.method);
+    parts.push('/');
+    parts.push('');
+    parts.push(this.canonicalHeaders());
+    parts.push(this.request.body);
+    return AWS.util.crypto.sha256(parts.join('\n'));
+  }
+
+});
+
+/**
+ * @api private
+ */
+module.exports = AWS.Signers.V3;
+
+
+/***/ }),
+
+/***/ 24473:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var inherit = AWS.util.inherit;
+
+__nccwpck_require__(66458);
+
+/**
+ * @api private
+ */
+AWS.Signers.V3Https = inherit(AWS.Signers.V3, {
+  authorization: function authorization(credentials) {
+    return 'AWS3-HTTPS ' +
+      'AWSAccessKeyId=' + credentials.accessKeyId + ',' +
+      'Algorithm=HmacSHA256,' +
+      'Signature=' + this.signature(credentials);
+  },
+
+  stringToSign: function stringToSign() {
+    return this.request.headers['X-Amz-Date'];
+  }
+});
+
+/**
+ * @api private
+ */
+module.exports = AWS.Signers.V3Https;
+
+
+/***/ }),
+
+/***/ 26529:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var v4Credentials = __nccwpck_require__(62660);
+var inherit = AWS.util.inherit;
+
+/**
+ * @api private
+ */
+var expiresHeader = 'presigned-expires';
+
+/**
+ * @api private
+ */
+AWS.Signers.V4 = inherit(AWS.Signers.RequestSigner, {
+  constructor: function V4(request, serviceName, options) {
+    AWS.Signers.RequestSigner.call(this, request);
+    this.serviceName = serviceName;
+    options = options || {};
+    this.signatureCache = typeof options.signatureCache === 'boolean' ? options.signatureCache : true;
+    this.operation = options.operation;
+    this.signatureVersion = options.signatureVersion;
+  },
+
+  algorithm: 'AWS4-HMAC-SHA256',
+
+  addAuthorization: function addAuthorization(credentials, date) {
+    var datetime = AWS.util.date.iso8601(date).replace(/[:\-]|\.\d{3}/g, '');
+
+    if (this.isPresigned()) {
+      this.updateForPresigned(credentials, datetime);
+    } else {
+      this.addHeaders(credentials, datetime);
+    }
+
+    this.request.headers['Authorization'] =
+      this.authorization(credentials, datetime);
+  },
+
+  addHeaders: function addHeaders(credentials, datetime) {
+    this.request.headers['X-Amz-Date'] = datetime;
+    if (credentials.sessionToken) {
+      this.request.headers['x-amz-security-token'] = credentials.sessionToken;
+    }
+  },
+
+  updateForPresigned: function updateForPresigned(credentials, datetime) {
+    var credString = this.credentialString(datetime);
+    var qs = {
+      'X-Amz-Date': datetime,
+      'X-Amz-Algorithm': this.algorithm,
+      'X-Amz-Credential': credentials.accessKeyId + '/' + credString,
+      'X-Amz-Expires': this.request.headers[expiresHeader],
+      'X-Amz-SignedHeaders': this.signedHeaders()
+    };
+
+    if (credentials.sessionToken) {
+      qs['X-Amz-Security-Token'] = credentials.sessionToken;
+    }
+
+    if (this.request.headers['Content-Type']) {
+      qs['Content-Type'] = this.request.headers['Content-Type'];
+    }
+    if (this.request.headers['Content-MD5']) {
+      qs['Content-MD5'] = this.request.headers['Content-MD5'];
+    }
+    if (this.request.headers['Cache-Control']) {
+      qs['Cache-Control'] = this.request.headers['Cache-Control'];
+    }
+
+    // need to pull in any other X-Amz-* headers
+    AWS.util.each.call(this, this.request.headers, function(key, value) {
+      if (key === expiresHeader) return;
+      if (this.isSignableHeader(key)) {
+        var lowerKey = key.toLowerCase();
+        // Metadata should be normalized
+        if (lowerKey.indexOf('x-amz-meta-') === 0) {
+          qs[lowerKey] = value;
+        } else if (lowerKey.indexOf('x-amz-') === 0) {
+          qs[key] = value;
+        }
+      }
+    });
+
+    var sep = this.request.path.indexOf('?') >= 0 ? '&' : '?';
+    this.request.path += sep + AWS.util.queryParamsToString(qs);
+  },
+
+  authorization: function authorization(credentials, datetime) {
+    var parts = [];
+    var credString = this.credentialString(datetime);
+    parts.push(this.algorithm + ' Credential=' +
+      credentials.accessKeyId + '/' + credString);
+    parts.push('SignedHeaders=' + this.signedHeaders());
+    parts.push('Signature=' + this.signature(credentials, datetime));
+    return parts.join(', ');
+  },
+
+  signature: function signature(credentials, datetime) {
+    var signingKey = v4Credentials.getSigningKey(
+      credentials,
+      datetime.substr(0, 8),
+      this.request.region,
+      this.serviceName,
+      this.signatureCache
+    );
+    return AWS.util.crypto.hmac(signingKey, this.stringToSign(datetime), 'hex');
+  },
+
+  stringToSign: function stringToSign(datetime) {
+    var parts = [];
+    parts.push('AWS4-HMAC-SHA256');
+    parts.push(datetime);
+    parts.push(this.credentialString(datetime));
+    parts.push(this.hexEncodedHash(this.canonicalString()));
+    return parts.join('\n');
+  },
+
+  canonicalString: function canonicalString() {
+    var parts = [], pathname = this.request.pathname();
+    if (this.serviceName !== 's3' && this.signatureVersion !== 's3v4') pathname = AWS.util.uriEscapePath(pathname);
+
+    parts.push(this.request.method);
+    parts.push(pathname);
+    parts.push(this.request.search());
+    parts.push(this.canonicalHeaders() + '\n');
+    parts.push(this.signedHeaders());
+    parts.push(this.hexEncodedBodyHash());
+    return parts.join('\n');
+  },
+
+  canonicalHeaders: function canonicalHeaders() {
+    var headers = [];
+    AWS.util.each.call(this, this.request.headers, function (key, item) {
+      headers.push([key, item]);
+    });
+    headers.sort(function (a, b) {
+      return a[0].toLowerCase() < b[0].toLowerCase() ? -1 : 1;
+    });
+    var parts = [];
+    AWS.util.arrayEach.call(this, headers, function (item) {
+      var key = item[0].toLowerCase();
+      if (this.isSignableHeader(key)) {
+        var value = item[1];
+        if (typeof value === 'undefined' || value === null || typeof value.toString !== 'function') {
+          throw AWS.util.error(new Error('Header ' + key + ' contains invalid value'), {
+            code: 'InvalidHeader'
+          });
+        }
+        parts.push(key + ':' +
+          this.canonicalHeaderValues(value.toString()));
+      }
+    });
+    return parts.join('\n');
+  },
+
+  canonicalHeaderValues: function canonicalHeaderValues(values) {
+    return values.replace(/\s+/g, ' ').replace(/^\s+|\s+$/g, '');
+  },
+
+  signedHeaders: function signedHeaders() {
+    var keys = [];
+    AWS.util.each.call(this, this.request.headers, function (key) {
+      key = key.toLowerCase();
+      if (this.isSignableHeader(key)) keys.push(key);
+    });
+    return keys.sort().join(';');
+  },
+
+  credentialString: function credentialString(datetime) {
+    return v4Credentials.createScope(
+      datetime.substr(0, 8),
+      this.request.region,
+      this.serviceName
+    );
+  },
+
+  hexEncodedHash: function hash(string) {
+    return AWS.util.crypto.sha256(string, 'hex');
+  },
+
+  hexEncodedBodyHash: function hexEncodedBodyHash() {
+    var request = this.request;
+    if (this.isPresigned() && (['s3', 's3-object-lambda'].indexOf(this.serviceName) > -1) && !request.body) {
+      return 'UNSIGNED-PAYLOAD';
+    } else if (request.headers['X-Amz-Content-Sha256']) {
+      return request.headers['X-Amz-Content-Sha256'];
+    } else {
+      return this.hexEncodedHash(this.request.body || '');
+    }
+  },
+
+  unsignableHeaders: [
+    'authorization',
+    'content-type',
+    'content-length',
+    'user-agent',
+    expiresHeader,
+    'expect',
+    'x-amzn-trace-id'
+  ],
+
+  isSignableHeader: function isSignableHeader(key) {
+    if (key.toLowerCase().indexOf('x-amz-') === 0) return true;
+    return this.unsignableHeaders.indexOf(key) < 0;
+  },
+
+  isPresigned: function isPresigned() {
+    return this.request.headers[expiresHeader] ? true : false;
+  }
+
+});
+
+/**
+ * @api private
+ */
+module.exports = AWS.Signers.V4;
+
+
+/***/ }),
+
+/***/ 62660:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+
+/**
+ * @api private
+ */
+var cachedSecret = {};
+
+/**
+ * @api private
+ */
+var cacheQueue = [];
+
+/**
+ * @api private
+ */
+var maxCacheEntries = 50;
+
+/**
+ * @api private
+ */
+var v4Identifier = 'aws4_request';
+
+/**
+ * @api private
+ */
+module.exports = {
+  /**
+   * @api private
+   *
+   * @param date [String]
+   * @param region [String]
+   * @param serviceName [String]
+   * @return [String]
+   */
+  createScope: function createScope(date, region, serviceName) {
+    return [
+      date.substr(0, 8),
+      region,
+      serviceName,
+      v4Identifier
+    ].join('/');
+  },
+
+  /**
+   * @api private
+   *
+   * @param credentials [Credentials]
+   * @param date [String]
+   * @param region [String]
+   * @param service [String]
+   * @param shouldCache [Boolean]
+   * @return [String]
+   */
+  getSigningKey: function getSigningKey(
+    credentials,
+    date,
+    region,
+    service,
+    shouldCache
+  ) {
+    var credsIdentifier = AWS.util.crypto
+      .hmac(credentials.secretAccessKey, credentials.accessKeyId, 'base64');
+    var cacheKey = [credsIdentifier, date, region, service].join('_');
+    shouldCache = shouldCache !== false;
+    if (shouldCache && (cacheKey in cachedSecret)) {
+      return cachedSecret[cacheKey];
+    }
+
+    var kDate = AWS.util.crypto.hmac(
+      'AWS4' + credentials.secretAccessKey,
+      date,
+      'buffer'
+    );
+    var kRegion = AWS.util.crypto.hmac(kDate, region, 'buffer');
+    var kService = AWS.util.crypto.hmac(kRegion, service, 'buffer');
+
+    var signingKey = AWS.util.crypto.hmac(kService, v4Identifier, 'buffer');
+    if (shouldCache) {
+      cachedSecret[cacheKey] = signingKey;
+      cacheQueue.push(cacheKey);
+      if (cacheQueue.length > maxCacheEntries) {
+        // remove the oldest entry (not the least recently used)
+        delete cachedSecret[cacheQueue.shift()];
+      }
+    }
+
+    return signingKey;
+  },
+
+  /**
+   * @api private
+   *
+   * Empties the derived signing key cache. Made available for testing purposes
+   * only.
+   */
+  emptyCache: function emptyCache() {
+    cachedSecret = {};
+    cacheQueue = [];
+  }
+};
+
+
+/***/ }),
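The value `getSigningKey` caches is the standard SigV4 key chain. A self-contained sketch of the same derivation using only Node's `crypto`:

```javascript
var crypto = require('crypto');

function hmac(key, data) {
  return crypto.createHmac('sha256', key).update(data, 'utf8').digest();
}

// kSigning = HMAC(HMAC(HMAC(HMAC('AWS4' + secret, date), region), service), 'aws4_request')
function deriveSigningKey(secretAccessKey, date, region, service) {
  var kDate = hmac('AWS4' + secretAccessKey, date); // date in YYYYMMDD form
  var kRegion = hmac(kDate, region);
  var kService = hmac(kRegion, service);
  return hmac(kService, 'aws4_request');            // v4Identifier above
}
```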
+
+/***/ 68118:
+/***/ ((module) => {
+
+function AcceptorStateMachine(states, state) {
+  this.currentState = state || null;
+  this.states = states || {};
+}
+
+AcceptorStateMachine.prototype.runTo = function runTo(finalState, done, bindObject, inputError) {
+  if (typeof finalState === 'function') {
+    inputError = bindObject; bindObject = done;
+    done = finalState; finalState = null;
+  }
+
+  var self = this;
+  var state = self.states[self.currentState];
+  state.fn.call(bindObject || self, inputError, function(err) {
+    if (err) {
+      if (state.fail) self.currentState = state.fail;
+      else return done ? done.call(bindObject, err) : null;
+    } else {
+      if (state.accept) self.currentState = state.accept;
+      else return done ? done.call(bindObject) : null;
+    }
+    if (self.currentState === finalState) {
+      return done ? done.call(bindObject, err) : null;
+    }
+
+    self.runTo(finalState, done, bindObject, err);
+  });
+};
+
+AcceptorStateMachine.prototype.addState = function addState(name, acceptState, failState, fn) {
+  if (typeof acceptState === 'function') {
+    fn = acceptState; acceptState = null; failState = null;
+  } else if (typeof failState === 'function') {
+    fn = failState; failState = null;
+  }
+
+  if (!this.currentState) this.currentState = name;
+  this.states[name] = { accept: acceptState, fail: failState, fn: fn };
+  return this;
+};
+
+/**
+ * @api private
+ */
+module.exports = AcceptorStateMachine;
+
+
+/***/ }),
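`addState`/`runTo` compose into small accept/fail pipelines (the SDK drives its request lifecycle this way). A toy sketch, assuming the module is imported under a hypothetical local path:

```javascript
var AcceptorStateMachine = require('./state_machine'); // hypothetical path

var fsm = new AcceptorStateMachine();
fsm.addState('build', 'send', 'error', function (err, done) { done(); })
   .addState('send', 'done', 'error', function (err, done) { done(); })
   .addState('error', null, null, function (err, done) { done(err); })
   .addState('done', null, null, function (err, done) { done(); });

// Walks build -> send, stops once currentState reaches 'done'.
fsm.runTo('done', function (err) {
  console.log(err ? 'failed' : 'completed');
});
```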
+
+/***/ 82647:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+
+/**
+ * Represents AWS token object, which contains {token}, and optional
+ * {expireTime}.
+ * Creating a `Token` object allows you to pass around your
+ * token to configuration and service objects.
+ *
+ * Note that this class typically does not need to be constructed manually,
+ * as the {AWS.Config} and {AWS.Service} classes both accept simple
+ * options hashes with the two keys. The token from this object will be used
+ * automatically in operations which require them.
+ *
+ * ## Expiring and Refreshing Token
+ *
+ * Occasionally token can expire in the middle of a long-running
+ * application. In this case, the SDK will automatically attempt to
+ * refresh the token from the storage location if the Token
+ * class implements the {refresh} method.
+ *
+ * If you are implementing a token storage location, you
+ * will want to create a subclass of the `Token` class and
+ * override the {refresh} method. This method allows token to be
+ * retrieved from the backing store, be it a file system, database, or
+ * some network storage. The method should reset the token attributes
+ * on the object.
+ *
+ * @!attribute token
+ *   @return [String] represents the literal token string. This will typically
+ *     be a base64 encoded string.
+ * @!attribute expireTime
+ *   @return [Date] a time when token should be considered expired. Used
+ *     in conjunction with {expired}.
+ * @!attribute expired
+ *   @return [Boolean] whether the token is expired and require a refresh. Used
+ *     in conjunction with {expireTime}.
+ */
+AWS.Token = AWS.util.inherit({
+  /**
+   * Creates a Token object with a given set of information in options hash.
+   * @option options token [String] represents the literal token string.
+   * @option options expireTime [Date] field representing the time at which
+   *   the token expires.
+   * @example Create a token object
+   *   var token = new AWS.Token({ token: 'token' });
+   */
+  constructor: function Token(options) {
+    // hide token from being displayed with util.inspect
+    AWS.util.hideProperties(this, ['token']);
+
+    this.expired = false;
+    this.expireTime = null;
+    this.refreshCallbacks = [];
+    if (arguments.length === 1) {
+      var options = arguments[0];
+      this.token = options.token;
+      this.expireTime = options.expireTime;
+    }
+  },
+
+  /**
+   * @return [Integer] the number of seconds before {expireTime} during which
+   *   the token will be considered expired.
+   */
+  expiryWindow: 15,
+
+  /**
+   * @return [Boolean] whether the Token object should call {refresh}
+   * @note Subclasses should override this method to provide custom refresh
+   *   logic.
+   */
+  needsRefresh: function needsRefresh() {
+    var currentTime = AWS.util.date.getDate().getTime();
+    var adjustedTime = new Date(currentTime + this.expiryWindow * 1000);
+
+    if (this.expireTime && adjustedTime > this.expireTime)
+      return true;
+
+    return this.expired || !this.token;
+  },
+
+  /**
+   * Gets the existing token, refreshing them if they are not yet loaded
+   * or have expired. Users should call this method before using {refresh},
+   * as this will not attempt to reload token when they are already
+   * loaded into the object.
+   *
+   * @callback callback function(err)
+   *   When this callback is called with no error, it means either token
+   *   do not need to be refreshed or refreshed token information has
+   *   been loaded into the object (as the `token` property).
+   *   @param err [Error] if an error occurred, this value will be filled
+   */
+  get: function get(callback) {
+    var self = this;
+    if (this.needsRefresh()) {
+      this.refresh(function(err) {
+        if (!err) self.expired = false; // reset expired flag
+        if (callback) callback(err);
+      });
+    } else if (callback) {
+      callback();
+    }
+  },
+
+  /**
+   * @!method getPromise()
+   *   Returns a 'thenable' promise.
+   *   Gets the existing token, refreshing it if it's not yet loaded
+   *   or have expired. Users should call this method before using {refresh},
+   *   as this will not attempt to reload token when it's already
+   *   loaded into the object.
+   *
+   *   Two callbacks can be provided to the `then` method on the returned promise.
+   *   The first callback will be called if the promise is fulfilled, and the second
+   *   callback will be called if the promise is rejected.
+   *   @callback fulfilledCallback function()
+   *     Called if the promise is fulfilled. When this callback is called, it means
+   *     either token does not need to be refreshed or refreshed token information
+   *     has been loaded into the object (as the `token` property).
+   *   @callback rejectedCallback function(err)
+   *     Called if the promise is rejected.
+   *     @param err [Error] if an error occurred, this value will be filled.
+   *   @return [Promise] A promise that represents the state of the `get` call.
+   *   @example Calling the `getPromise` method.
+   *     var promise = tokenProvider.getPromise();
+   *     promise.then(function() { ... }, function(err) { ... });
+   */
+
+  /**
+   * @!method refreshPromise()
+   *   Returns a 'thenable' promise.
+   *   Refreshes the token. Users should call {get} before attempting
+   *   to forcibly refresh token.
+   *
+   *   Two callbacks can be provided to the `then` method on the returned promise.
+   *   The first callback will be called if the promise is fulfilled, and the second
+   *   callback will be called if the promise is rejected.
+   *   @callback fulfilledCallback function()
+   *     Called if the promise is fulfilled. When this callback is called, it
+   *     means refreshed token information has been loaded into the object
+   *     (as the `token` property).
+   *   @callback rejectedCallback function(err)
+   *     Called if the promise is rejected.
+   *     @param err [Error] if an error occurred, this value will be filled.
+   *   @return [Promise] A promise that represents the state of the `refresh` call.
+   *   @example Calling the `refreshPromise` method.
+   *     var promise = tokenProvider.refreshPromise();
+   *     promise.then(function() { ... }, function(err) { ... });
+   */
+
+  /**
+   * Refreshes the token. Users should call {get} before attempting
+   * to forcibly refresh token.
+   *
+   * @callback callback function(err)
+   *   When this callback is called with no error, it means refreshed
+   *   token information has been loaded into the object (as the
+   *   `token` property).
+   *   @param err [Error] if an error occurred, this value will be filled
+   * @note Subclasses should override this class to reset the
+   *   {token} on the token object and then call the callback with
+   *   any error information.
+   * @see get
+   */
+  refresh: function refresh(callback) {
+    this.expired = false;
+    callback();
+  },
+
+  /**
+   * @api private
+   * @param callback
+   */
+  coalesceRefresh: function coalesceRefresh(callback, sync) {
+    var self = this;
+    if (self.refreshCallbacks.push(callback) === 1) {
+      self.load(function onLoad(err) {
+        AWS.util.arrayEach(self.refreshCallbacks, function(callback) {
+          if (sync) {
+            callback(err);
+          } else {
+            // callback could throw, so defer to ensure all callbacks are notified
+            AWS.util.defer(function () {
+              callback(err);
+            });
+          }
+        });
+        self.refreshCallbacks.length = 0;
+      });
+    }
+  },
+
+  /**
+   * @api private
+   * @param callback
+   */
+  load: function load(callback) {
+    callback();
+  }
+});
+
+/**
+ * @api private
+ */
+AWS.Token.addPromisesToClass = function addPromisesToClass(PromiseDependency) {
+  this.prototype.getPromise = AWS.util.promisifyMethod('get', PromiseDependency);
+  this.prototype.refreshPromise = AWS.util.promisifyMethod('refresh', PromiseDependency);
+};
+
+/**
+ * @api private
+ */
+AWS.Token.deletePromisesFromClass = function deletePromisesFromClass() {
+  delete this.prototype.getPromise;
+  delete this.prototype.refreshPromise;
+};
+
+AWS.util.addPromises(AWS.Token);
+
+
+/***/ }),
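The docs above describe subclassing for custom token stores. A minimal sketch of a file-backed token source (the file layout is hypothetical; only `refresh` is overridden, resetting `token`/`expireTime` as the contract requires):

```javascript
var AWS = require('aws-sdk');
var fs = require('fs');

var FileToken = AWS.util.inherit(AWS.Token, {
  constructor: function FileToken(path) {
    AWS.Token.call(this);
    this.path = path; // hypothetical JSON file: {"token": "...", "expireTime": "..."}
  },
  refresh: function refresh(callback) {
    try {
      var data = JSON.parse(fs.readFileSync(this.path));
      this.token = data.token;
      this.expireTime = new Date(data.expireTime);
      this.expired = false;
      callback();
    } catch (err) {
      callback(err);
    }
  }
});
```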
+
+/***/ 90327:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var crypto = __nccwpck_require__(6113);
+var fs = __nccwpck_require__(57147);
+var path = __nccwpck_require__(71017);
+var iniLoader = AWS.util.iniLoader;
+
+// Tracking refresh attempt to ensure refresh is not attempted more than once every 30 seconds.
+var lastRefreshAttemptTime = 0;
+
+/**
+ * Throws error is key is not present in token object.
+ *
+ * @param token [Object] Object to be validated.
+ * @param key [String] The key to be validated on the object.
+ */
+var validateTokenKey = function validateTokenKey(token, key) {
+  if (!token[key]) {
+    throw AWS.util.error(
+      new Error('Key "' + key + '" not present in SSO Token'),
+      { code: 'SSOTokenProviderFailure' }
+    );
+  }
+};
+
+/**
+ * Calls callback function with or without error based on provided times in case
+ * of unsuccessful refresh.
+ *
+ * @param currentTime [number] current time in milliseconds since ECMAScript epoch.
+ * @param tokenExpireTime [number] token expire time in milliseconds since ECMAScript epoch.
+ * @param callback [Function] Callback to call in case of error.
+ */
+var refreshUnsuccessful = function refreshUnsuccessful(
+  currentTime,
+  tokenExpireTime,
+  callback
+) {
+  if (tokenExpireTime > currentTime) {
+    // Cached token is still valid, return.
+    callback(null);
+  } else {
+    // Token invalid, throw error requesting user to sso login.
+    throw AWS.util.error(
+      new Error('SSO Token refresh failed. Please log in using "aws sso login"'),
+      { code: 'SSOTokenProviderFailure' }
+    );
+  }
+};
+
+/**
+ * Represents token loaded from disk derived from the AWS SSO device grant authorication flow.
+ *
+ * ## Using SSO Token Provider
+ *
+ * This provider is checked by default in the Node.js environment in TokenProviderChain.
+ * To use the SSO Token Provider, simply add your SSO Start URL and Region to the
+ * ~/.aws/config file in the following format:
+ *
+ * [default]
+ * sso_start_url = https://d-abc123.awsapps.com/start
+ * sso_region = us-east-1
+ *
+ * ## Using custom profiles
+ *
+ * The SDK supports loading token for separate profiles. This can be done in two ways:
+ *
+ * 1. Set the `AWS_PROFILE` environment variable in your process prior to loading the SDK.
+ * 2. Directly load the AWS.SSOTokenProvider:
+ *
+ * ```javascript
+ * var ssoTokenProvider = new AWS.SSOTokenProvider({profile: 'myprofile'});
+ * ```
+ *
+ * @!macro nobrowser
+ */
+AWS.SSOTokenProvider = AWS.util.inherit(AWS.Token, {
+  /**
+   * Expiry window of five minutes.
+   */
+  expiryWindow: 5 * 60,
+
+  /**
+   * Creates a new token object from cached access token.
+   *
+   * @param options [map] a set of options
+   * @option options profile [String] (AWS_PROFILE env var or 'default')
+   *   the name of the profile to load.
+   * @option options callback [Function] (err) Token is eagerly loaded
+   *   by the constructor. When the callback is called with no error, the
+   *   token has been loaded successfully.
+   */
+  constructor: function SSOTokenProvider(options) {
+    AWS.Token.call(this);
+
+    options = options || {};
+
+    this.expired = true;
+    this.profile = options.profile || process.env.AWS_PROFILE || AWS.util.defaultProfile;
+    this.get(options.callback || AWS.util.fn.noop);
+  },
+
+  /**
+   * Reads sso_start_url from provided profile, and reads token from
+   * ~/.aws/sso/cache/.json
+   *
+   * Throws an error if required fields token and expiresAt are missing.
+   * Throws an error if token has expired and metadata to perform refresh is
+   * not available.
+   * Attempts to refresh the token if it's within 5 minutes before expiry time.
+   *
+   * @api private
+   */
+  load: function load(callback) {
+    var self = this;
+    var profiles = iniLoader.loadFrom({ isConfig: true });
+    var profile = profiles[this.profile] || {};
+
+    if (Object.keys(profile).length === 0) {
+      throw AWS.util.error(
+        new Error('Profile "' + this.profile + '" not found'),
+        { code: 'SSOTokenProviderFailure' }
+      );
+    } else if (!profile['sso_session']) {
+      throw AWS.util.error(
+        new Error('Profile "' + this.profile + '" is missing required property "sso_session".'),
+        { code: 'SSOTokenProviderFailure' }
+      );
+    }
+
+    var ssoSessionName = profile['sso_session'];
+    var ssoSessions = iniLoader.loadSsoSessionsFrom();
+    var ssoSession = ssoSessions[ssoSessionName];
+
+    if (!ssoSession) {
+      throw AWS.util.error(
+        new Error('Sso session "' + ssoSessionName + '" not found'),
+        { code: 'SSOTokenProviderFailure' }
+      );
+    } else if (!ssoSession['sso_start_url']) {
+      throw AWS.util.error(
+        new Error('Sso session "' + this.profile + '" is missing required property "sso_start_url".'),
+        { code: 'SSOTokenProviderFailure' }
+      );
+    } else if (!ssoSession['sso_region']) {
+      throw AWS.util.error(
+        new Error('Sso session "' + this.profile + '" is missing required property "sso_region".'),
+        { code: 'SSOTokenProviderFailure' }
+      );
+    }
+
+    var hasher = crypto.createHash('sha1');
+    var fileName = hasher.update(ssoSessionName).digest('hex') + '.json';
+    var cachePath = path.join(iniLoader.getHomeDir(), '.aws', 'sso', 'cache', fileName);
+    var tokenFromCache = JSON.parse(fs.readFileSync(cachePath));
+
+    if (!tokenFromCache) {
+      throw AWS.util.error(
+        new Error('Cached token not found. Please log in using "aws sso login"' +
+          ' for profile "' + this.profile + '".'),
+        { code: 'SSOTokenProviderFailure' }
+      );
+    }
+
+    validateTokenKey(tokenFromCache, 'accessToken');
+    validateTokenKey(tokenFromCache, 'expiresAt');
+
+    var currentTime = AWS.util.date.getDate().getTime();
+    var adjustedTime = new Date(currentTime + this.expiryWindow * 1000);
+    var tokenExpireTime = new Date(tokenFromCache['expiresAt']);
+
+    if (tokenExpireTime > adjustedTime) {
+      // Token is valid and not expired.
+      self.token = tokenFromCache.accessToken;
+      self.expireTime = tokenExpireTime;
+      self.expired = false;
+      callback(null);
+      return;
+    }
+
+    // Skip new refresh, if last refresh was done within 30 seconds.
+    if (currentTime - lastRefreshAttemptTime < 30 * 1000) {
+      refreshUnsuccessful(currentTime, tokenExpireTime, callback);
+      return;
+    }
+
+    // Token is in expiry window, refresh from SSOOIDC.createToken() call.
+    validateTokenKey(tokenFromCache, 'clientId');
+    validateTokenKey(tokenFromCache, 'clientSecret');
+    validateTokenKey(tokenFromCache, 'refreshToken');
+
+    if (!self.service || self.service.config.region !== ssoSession.sso_region) {
+      self.service = new AWS.SSOOIDC({ region: ssoSession.sso_region });
+    }
+
+    var params = {
+      clientId: tokenFromCache.clientId,
+      clientSecret: tokenFromCache.clientSecret,
+      refreshToken: tokenFromCache.refreshToken,
+      grantType: 'refresh_token',
+    };
+
+    lastRefreshAttemptTime = AWS.util.date.getDate().getTime();
+    self.service.createToken(params, function(err, data) {
+      if (err || !data) {
+        refreshUnsuccessful(currentTime, tokenExpireTime, callback);
+      } else {
+        try {
+          validateTokenKey(data, 'accessToken');
+          validateTokenKey(data, 'expiresIn');
+          self.expired = false;
+          self.token = data.accessToken;
+          self.expireTime = new Date(Date.now() + data.expiresIn * 1000);
+          callback(null);
+
+          try {
+            // Write updated token data to disk.
+            tokenFromCache.accessToken = data.accessToken;
+            tokenFromCache.expiresAt = self.expireTime.toISOString();
+            tokenFromCache.refreshToken = data.refreshToken;
+            fs.writeFileSync(cachePath, JSON.stringify(tokenFromCache, null, 2));
+          } catch (error) {
+            // Swallow error if unable to write token to file.
+          }
+        } catch (error) {
+          refreshUnsuccessful(currentTime, tokenExpireTime, callback);
+        }
+      }
+    });
+  },
+
+  /**
+   * Loads the cached access token from disk.
+   *
+   * @callback callback function(err)
+   *   Called after the AWS SSO process has been executed. When this
+   *   callback is called with no error, it means that the token information
+   *   has been loaded into the object (as the `token` property).
+   *   @param err [Error] if an error occurred, this value will be filled.
+   * @see get
+   */
+  refresh: function refresh(callback) {
+    iniLoader.clearCachedFiles();
+    this.coalesceRefresh(callback || AWS.util.fn.callback);
+  },
+});
+
+
+/***/ }),
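The cache file `load()` reads is keyed by a SHA-1 of the sso-session name. A sketch reproducing the path computation (the session name is hypothetical):

```javascript
var crypto = require('crypto');
var path = require('path');
var os = require('os');

var ssoSessionName = 'my-sso'; // hypothetical
var fileName = crypto.createHash('sha1').update(ssoSessionName).digest('hex') + '.json';
var cachePath = path.join(os.homedir(), '.aws', 'sso', 'cache', fileName);
// e.g. ~/.aws/sso/cache/<sha1-hex>.json
```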
+
+/***/ 50126:
+/***/ ((__unused_webpack_module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+
+/**
+ * Creates a token provider chain that searches for token in a list of
+ * token providers specified by the {providers} property.
+ *
+ * By default, the chain will use the {defaultProviders} to resolve token.
+ *
+ * ## Setting Providers
+ *
+ * Each provider in the {providers} list should be a function that returns
+ * a {AWS.Token} object, or a hardcoded token object. The function
+ * form allows for delayed execution of the Token construction.
+ *
+ * ## Resolving Token from a Chain
+ *
+ * Call {resolve} to return the first valid token object that can be
+ * loaded by the provider chain.
+ *
+ * For example, to resolve a chain with a custom provider that checks a file
+ * on disk after the set of {defaultProviders}:
+ *
+ * ```javascript
+ * var diskProvider = new FileTokenProvider('./token.json');
+ * var chain = new AWS.TokenProviderChain();
+ * chain.providers.push(diskProvider);
+ * chain.resolve();
+ * ```
+ *
+ * The above code will return the `diskProvider` object if the
+ * file contains token and the `defaultProviders` do not contain
+ * any token.
+ *
+ * @!attribute providers
+ *   @return [Array]
+ *     a list of token objects or functions that return token
+ *     objects. If the provider is a function, the function will be
+ *     executed lazily when the provider needs to be checked for valid
+ *     token. By default, this object will be set to the {defaultProviders}.
+ *   @see defaultProviders
+ */
+AWS.TokenProviderChain = AWS.util.inherit(AWS.Token, {
+
+  /**
+   * Creates a new TokenProviderChain with a default set of providers
+   * specified by {defaultProviders}.
+   */
+  constructor: function TokenProviderChain(providers) {
+    if (providers) {
+      this.providers = providers;
+    } else {
+      this.providers = AWS.TokenProviderChain.defaultProviders.slice(0);
+    }
+    this.resolveCallbacks = [];
+  },
+
+  /**
+   * @!method resolvePromise()
+   *   Returns a 'thenable' promise.
+   *   Resolves the provider chain by searching for the first token in {providers}.
+   *
+   *   Two callbacks can be provided to the `then` method on the returned promise.
+   *   The first callback will be called if the promise is fulfilled, and the second
+   *   callback will be called if the promise is rejected.
+   *   @callback fulfilledCallback function(token)
+   *     Called if the promise is fulfilled and the provider resolves the chain
+   *     to a token object
+   *     @param token [AWS.Token] the token object resolved by the provider chain.
+   *   @callback rejectedCallback function(error)
+   *     Called if the promise is rejected.
+   *     @param err [Error] the error object returned if no token is found.
+   *   @return [Promise] A promise that represents the state of the `resolve` method call.
+   *   @example Calling the `resolvePromise` method.
+   *     var promise = chain.resolvePromise();
+   *     promise.then(function(token) { ... }, function(err) { ... });
+   */
+
+  /**
+   * Resolves the provider chain by searching for the first token in {providers}.
+   *
+   * @callback callback function(err, token)
+   *   Called when the provider resolves the chain to a token object
+   *   or null if no token can be found.
+   *
+   *   @param err [Error] the error object returned if no token is found.
+   *   @param token [AWS.Token] the token object resolved by the provider chain.
+   * @return [AWS.TokenProviderChain] the provider, for chaining.
+   */
+  resolve: function resolve(callback) {
+    var self = this;
+    if (self.providers.length === 0) {
+      callback(new Error('No providers'));
+      return self;
+    }
+
+    if (self.resolveCallbacks.push(callback) === 1) {
+      var index = 0;
+      var providers = self.providers.slice(0);
+
+      function resolveNext(err, token) {
+        if ((!err && token) || index === providers.length) {
+          AWS.util.arrayEach(self.resolveCallbacks, function (callback) {
+            callback(err, token);
+          });
+          self.resolveCallbacks.length = 0;
+          return;
+        }
+
+        var provider = providers[index++];
+        if (typeof provider === 'function') {
+          token = provider.call();
+        } else {
+          token = provider;
+        }
+
+        if (token.get) {
+          token.get(function (getErr) {
+            resolveNext(getErr, getErr ? null : token);
+          });
+        } else {
+          resolveNext(null, token);
+        }
+      }
+
+      resolveNext();
+    }
+
+    return self;
+  }
+});
+
+/**
+ * The default set of providers used by a vanilla TokenProviderChain.
+ *
+ * In the browser:
+ *
+ * ```javascript
+ * AWS.TokenProviderChain.defaultProviders = []
+ * ```
+ *
+ * In Node.js:
+ *
+ * ```javascript
+ * AWS.TokenProviderChain.defaultProviders = [
+ *   function () { return new AWS.SSOTokenProvider(); },
+ * ]
+ * ```
+ */
+AWS.TokenProviderChain.defaultProviders = [];
+
+/**
+ * @api private
+ */
+AWS.TokenProviderChain.addPromisesToClass = function addPromisesToClass(PromiseDependency) {
+  this.prototype.resolvePromise = AWS.util.promisifyMethod('resolve', PromiseDependency);
+};
+
+/**
+ * @api private
+ */
+AWS.TokenProviderChain.deletePromisesFromClass = function deletePromisesFromClass() {
+  delete this.prototype.resolvePromise;
+};
+
+AWS.util.addPromises(AWS.TokenProviderChain);
+
+
+/***/ }),
+ * @example Breaking out of an iterator function + * AWS.util.each({a: 1, b: 2, c: 3}, function(key, value) { + * if (key == 'b') return AWS.util.abort; + * }); + * @see each + * @see arrayEach + * @api private + */ +var util = { + environment: 'nodejs', + engine: function engine() { + if (util.isBrowser() && typeof navigator !== 'undefined') { + return navigator.userAgent; + } else { + var engine = process.platform + '/' + process.version; + if (process.env.AWS_EXECUTION_ENV) { + engine += ' exec-env/' + process.env.AWS_EXECUTION_ENV; + } + return engine; + } + }, + + userAgent: function userAgent() { + var name = util.environment; + var agent = 'aws-sdk-' + name + '/' + (__nccwpck_require__(28437).VERSION); + if (name === 'nodejs') agent += ' ' + util.engine(); + return agent; + }, + + uriEscape: function uriEscape(string) { + var output = encodeURIComponent(string); + output = output.replace(/[^A-Za-z0-9_.~\-%]+/g, escape); + + // AWS percent-encodes some extra non-standard characters in a URI + output = output.replace(/[*]/g, function(ch) { + return '%' + ch.charCodeAt(0).toString(16).toUpperCase(); + }); + + return output; + }, + + uriEscapePath: function uriEscapePath(string) { + var parts = []; + util.arrayEach(string.split('/'), function (part) { + parts.push(util.uriEscape(part)); + }); + return parts.join('/'); + }, + + urlParse: function urlParse(url) { + return util.url.parse(url); + }, + + urlFormat: function urlFormat(url) { + return util.url.format(url); + }, + + queryStringParse: function queryStringParse(qs) { + return util.querystring.parse(qs); + }, + + queryParamsToString: function queryParamsToString(params) { + var items = []; + var escape = util.uriEscape; + var sortedKeys = Object.keys(params).sort(); + + util.arrayEach(sortedKeys, function(name) { + var value = params[name]; + var ename = escape(name); + var result = ename + '='; + if (Array.isArray(value)) { + var vals = []; + util.arrayEach(value, function(item) { vals.push(escape(item)); }); + result = ename + '=' + vals.sort().join('&' + ename + '='); + } else if (value !== undefined && value !== null) { + result = ename + '=' + escape(value); + } + items.push(result); + }); + + return items.join('&'); + }, + + readFileSync: function readFileSync(path) { + if (util.isBrowser()) return null; + return (__nccwpck_require__(57147).readFileSync)(path, 'utf-8'); + }, + + base64: { + encode: function encode64(string) { + if (typeof string === 'number') { + throw util.error(new Error('Cannot base64 encode number ' + string)); + } + if (string === null || typeof string === 'undefined') { + return string; + } + var buf = util.buffer.toBuffer(string); + return buf.toString('base64'); + }, + + decode: function decode64(string) { + if (typeof string === 'number') { + throw util.error(new Error('Cannot base64 decode number ' + string)); + } + if (string === null || typeof string === 'undefined') { + return string; + } + return util.buffer.toBuffer(string, 'base64'); + } + + }, + + buffer: { + /** + * Buffer constructor for Node buffer and buffer pollyfill + */ + toBuffer: function(data, encoding) { + return (typeof util.Buffer.from === 'function' && util.Buffer.from !== Uint8Array.from) ? 
+ util.Buffer.from(data, encoding) : new util.Buffer(data, encoding); + }, + + alloc: function(size, fill, encoding) { + if (typeof size !== 'number') { + throw new Error('size passed to alloc must be a number.'); + } + if (typeof util.Buffer.alloc === 'function') { + return util.Buffer.alloc(size, fill, encoding); + } else { + var buf = new util.Buffer(size); + if (fill !== undefined && typeof buf.fill === 'function') { + buf.fill(fill, undefined, undefined, encoding); + } + return buf; + } + }, + + toStream: function toStream(buffer) { + if (!util.Buffer.isBuffer(buffer)) buffer = util.buffer.toBuffer(buffer); + + var readable = new (util.stream.Readable)(); + var pos = 0; + readable._read = function(size) { + if (pos >= buffer.length) return readable.push(null); + + var end = pos + size; + if (end > buffer.length) end = buffer.length; + readable.push(buffer.slice(pos, end)); + pos = end; + }; + + return readable; + }, + + /** + * Concatenates a list of Buffer objects. + */ + concat: function(buffers) { + var length = 0, + offset = 0, + buffer = null, i; + + for (i = 0; i < buffers.length; i++) { + length += buffers[i].length; + } + + buffer = util.buffer.alloc(length); + + for (i = 0; i < buffers.length; i++) { + buffers[i].copy(buffer, offset); + offset += buffers[i].length; + } + + return buffer; + } + }, + + string: { + byteLength: function byteLength(string) { + if (string === null || string === undefined) return 0; + if (typeof string === 'string') string = util.buffer.toBuffer(string); + + if (typeof string.byteLength === 'number') { + return string.byteLength; + } else if (typeof string.length === 'number') { + return string.length; + } else if (typeof string.size === 'number') { + return string.size; + } else if (typeof string.path === 'string') { + return (__nccwpck_require__(57147).lstatSync)(string.path).size; + } else { + throw util.error(new Error('Cannot determine length of ' + string), + { object: string }); + } + }, + + upperFirst: function upperFirst(string) { + return string[0].toUpperCase() + string.substr(1); + }, + + lowerFirst: function lowerFirst(string) { + return string[0].toLowerCase() + string.substr(1); + } + }, + + ini: { + parse: function string(ini) { + var currentSection, map = {}; + util.arrayEach(ini.split(/\r?\n/), function(line) { + line = line.split(/(^|\s)[;#]/)[0].trim(); // remove comments and trim + var isSection = line[0] === '[' && line[line.length - 1] === ']'; + if (isSection) { + currentSection = line.substring(1, line.length - 1); + if (currentSection === '__proto__' || currentSection.split(/\s/)[1] === '__proto__') { + throw util.error( + new Error('Cannot load profile name \'' + currentSection + '\' from shared ini file.') + ); + } + } else if (currentSection) { + var indexOfEqualsSign = line.indexOf('='); + var start = 0; + var end = line.length - 1; + var isAssignment = + indexOfEqualsSign !== -1 && indexOfEqualsSign !== start && indexOfEqualsSign !== end; + + if (isAssignment) { + var name = line.substring(0, indexOfEqualsSign).trim(); + var value = line.substring(indexOfEqualsSign + 1).trim(); + + map[currentSection] = map[currentSection] || {}; + map[currentSection][name] = value; + } + } + }); + + return map; + } + }, + + fn: { + noop: function() {}, + callback: function (err) { if (err) throw err; }, + + /** + * Turn a synchronous function into as "async" function by making it call + * a callback. The underlying function is called with all but the last argument, + * which is treated as the callback. 
The callback is passed a first argument
+     * of null on success to mimic standard node callbacks.
+     */
+    makeAsync: function makeAsync(fn, expectedArgs) {
+      if (expectedArgs && expectedArgs <= fn.length) {
+        return fn;
+      }
+
+      return function() {
+        var args = Array.prototype.slice.call(arguments, 0);
+        var callback = args.pop();
+        var result = fn.apply(null, args);
+        callback(result);
+      };
+    }
+  },
+
+  /**
+   * Date and time utility functions.
+   */
+  date: {
+
+    /**
+     * @return [Date] the current JavaScript date object. Since all
+     *   AWS services rely on this date object, you can override
+     *   this function to provide a special time value to AWS service
+     *   requests.
+     */
+    getDate: function getDate() {
+      if (!AWS) AWS = __nccwpck_require__(28437);
+      if (AWS.config.systemClockOffset) { // use offset when non-zero
+        return new Date(new Date().getTime() + AWS.config.systemClockOffset);
+      } else {
+        return new Date();
+      }
+    },
+
+    /**
+     * @return [String] the date in ISO-8601 format
+     */
+    iso8601: function iso8601(date) {
+      if (date === undefined) { date = util.date.getDate(); }
+      return date.toISOString().replace(/\.\d{3}Z$/, 'Z');
+    },
+
+    /**
+     * @return [String] the date in RFC 822 format
+     */
+    rfc822: function rfc822(date) {
+      if (date === undefined) { date = util.date.getDate(); }
+      return date.toUTCString();
+    },
+
+    /**
+     * @return [Integer] the UNIX timestamp value for the current time
+     */
+    unixTimestamp: function unixTimestamp(date) {
+      if (date === undefined) { date = util.date.getDate(); }
+      return date.getTime() / 1000;
+    },
+
+    /**
+     * @param [String,number,Date] date
+     * @return [Date]
+     */
+    from: function format(date) {
+      if (typeof date === 'number') {
+        return new Date(date * 1000); // unix timestamp
+      } else {
+        return new Date(date);
+      }
+    },
+
+    /**
+     * Given a Date or date-like value, this function formats the
+     * date into a string of the requested format.
+ * @param [String,number,Date] date + * @param [String] formatter Valid formats are: + # * 'iso8601' + # * 'rfc822' + # * 'unixTimestamp' + * @return [String] + */ + format: function format(date, formatter) { + if (!formatter) formatter = 'iso8601'; + return util.date[formatter](util.date.from(date)); + }, + + parseTimestamp: function parseTimestamp(value) { + if (typeof value === 'number') { // unix timestamp (number) + return new Date(value * 1000); + } else if (value.match(/^\d+$/)) { // unix timestamp + return new Date(value * 1000); + } else if (value.match(/^\d{4}/)) { // iso8601 + return new Date(value); + } else if (value.match(/^\w{3},/)) { // rfc822 + return new Date(value); + } else { + throw util.error( + new Error('unhandled timestamp format: ' + value), + {code: 'TimestampParserError'}); + } + } + + }, + + crypto: { + crc32Table: [ + 0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, + 0x706AF48F, 0xE963A535, 0x9E6495A3, 0x0EDB8832, 0x79DCB8A4, + 0xE0D5E91E, 0x97D2D988, 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, + 0x90BF1D91, 0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, + 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, 0x136C9856, + 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, 0x14015C4F, 0x63066CD9, + 0xFA0F3D63, 0x8D080DF5, 0x3B6E20C8, 0x4C69105E, 0xD56041E4, + 0xA2677172, 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B, + 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, 0x32D86CE3, + 0x45DF5C75, 0xDCD60DCF, 0xABD13D59, 0x26D930AC, 0x51DE003A, + 0xC8D75180, 0xBFD06116, 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, + 0xB8BDA50F, 0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, + 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D, 0x76DC4190, + 0x01DB7106, 0x98D220BC, 0xEFD5102A, 0x71B18589, 0x06B6B51F, + 0x9FBFE4A5, 0xE8B8D433, 0x7807C9A2, 0x0F00F934, 0x9609A88E, + 0xE10E9818, 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01, + 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, 0x6C0695ED, + 0x1B01A57B, 0x8208F4C1, 0xF50FC457, 0x65B0D9C6, 0x12B7E950, + 0x8BBEB8EA, 0xFCB9887C, 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, + 0xFBD44C65, 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, + 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, 0x4369E96A, + 0x346ED9FC, 0xAD678846, 0xDA60B8D0, 0x44042D73, 0x33031DE5, + 0xAA0A4C5F, 0xDD0D7CC9, 0x5005713C, 0x270241AA, 0xBE0B1010, + 0xC90C2086, 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F, + 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, 0x59B33D17, + 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, 0xEDB88320, 0x9ABFB3B6, + 0x03B6E20C, 0x74B1D29A, 0xEAD54739, 0x9DD277AF, 0x04DB2615, + 0x73DC1683, 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, + 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1, 0xF00F9344, + 0x8708A3D2, 0x1E01F268, 0x6906C2FE, 0xF762575D, 0x806567CB, + 0x196C3671, 0x6E6B06E7, 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, + 0x67DD4ACC, 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, + 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, 0xD1BB67F1, + 0xA6BC5767, 0x3FB506DD, 0x48B2364B, 0xD80D2BDA, 0xAF0A1B4C, + 0x36034AF6, 0x41047A60, 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, + 0x4669BE79, 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, + 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F, 0xC5BA3BBE, + 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, 0xC2D7FFA7, 0xB5D0CF31, + 0x2CD99E8B, 0x5BDEAE1D, 0x9B64C2B0, 0xEC63F226, 0x756AA39C, + 0x026D930A, 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713, + 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, 0x92D28E9B, + 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, 0x86D3D2D4, 0xF1D4E242, + 0x68DDB3F8, 0x1FDA836E, 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, + 0x18B74777, 0x88085AE6, 
0xFF0F6A70, 0x66063BCA, 0x11010B5C, + 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45, 0xA00AE278, + 0xD70DD2EE, 0x4E048354, 0x3903B3C2, 0xA7672661, 0xD06016F7, + 0x4969474D, 0x3E6E77DB, 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, + 0x37D83BF0, 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, + 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, 0xBAD03605, + 0xCDD70693, 0x54DE5729, 0x23D967BF, 0xB3667A2E, 0xC4614AB8, + 0x5D681B02, 0x2A6F2B94, 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, + 0x2D02EF8D], + + crc32: function crc32(data) { + var tbl = util.crypto.crc32Table; + var crc = 0 ^ -1; + + if (typeof data === 'string') { + data = util.buffer.toBuffer(data); + } + + for (var i = 0; i < data.length; i++) { + var code = data.readUInt8(i); + crc = (crc >>> 8) ^ tbl[(crc ^ code) & 0xFF]; + } + return (crc ^ -1) >>> 0; + }, + + hmac: function hmac(key, string, digest, fn) { + if (!digest) digest = 'binary'; + if (digest === 'buffer') { digest = undefined; } + if (!fn) fn = 'sha256'; + if (typeof string === 'string') string = util.buffer.toBuffer(string); + return util.crypto.lib.createHmac(fn, key).update(string).digest(digest); + }, + + md5: function md5(data, digest, callback) { + return util.crypto.hash('md5', data, digest, callback); + }, + + sha256: function sha256(data, digest, callback) { + return util.crypto.hash('sha256', data, digest, callback); + }, + + hash: function(algorithm, data, digest, callback) { + var hash = util.crypto.createHash(algorithm); + if (!digest) { digest = 'binary'; } + if (digest === 'buffer') { digest = undefined; } + if (typeof data === 'string') data = util.buffer.toBuffer(data); + var sliceFn = util.arraySliceFn(data); + var isBuffer = util.Buffer.isBuffer(data); + //Identifying objects with an ArrayBuffer as buffers + if (util.isBrowser() && typeof ArrayBuffer !== 'undefined' && data && data.buffer instanceof ArrayBuffer) isBuffer = true; + + if (callback && typeof data === 'object' && + typeof data.on === 'function' && !isBuffer) { + data.on('data', function(chunk) { hash.update(chunk); }); + data.on('error', function(err) { callback(err); }); + data.on('end', function() { callback(null, hash.digest(digest)); }); + } else if (callback && sliceFn && !isBuffer && + typeof FileReader !== 'undefined') { + // this might be a File/Blob + var index = 0, size = 1024 * 512; + var reader = new FileReader(); + reader.onerror = function() { + callback(new Error('Failed to read data.')); + }; + reader.onload = function() { + var buf = new util.Buffer(new Uint8Array(reader.result)); + hash.update(buf); + index += buf.length; + reader._continueReading(); + }; + reader._continueReading = function() { + if (index >= data.size) { + callback(null, hash.digest(digest)); + return; + } + + var back = index + size; + if (back > data.size) back = data.size; + reader.readAsArrayBuffer(sliceFn.call(data, index, back)); + }; + + reader._continueReading(); + } else { + if (util.isBrowser() && typeof data === 'object' && !isBuffer) { + data = new util.Buffer(new Uint8Array(data)); + } + var out = hash.update(data).digest(digest); + if (callback) callback(null, out); + return out; + } + }, + + toHex: function toHex(data) { + var out = []; + for (var i = 0; i < data.length; i++) { + out.push(('0' + data.charCodeAt(i).toString(16)).substr(-2, 2)); + } + return out.join(''); + }, + + createHash: function createHash(algorithm) { + return util.crypto.lib.createHash(algorithm); + } + + }, + + /** @!ignore */ + + /* Abort constant */ + abort: {}, + + each: function each(object, iterFunction) { + for (var key in 
object) { + if (Object.prototype.hasOwnProperty.call(object, key)) { + var ret = iterFunction.call(this, key, object[key]); + if (ret === util.abort) break; + } + } + }, + + arrayEach: function arrayEach(array, iterFunction) { + for (var idx in array) { + if (Object.prototype.hasOwnProperty.call(array, idx)) { + var ret = iterFunction.call(this, array[idx], parseInt(idx, 10)); + if (ret === util.abort) break; + } + } + }, + + update: function update(obj1, obj2) { + util.each(obj2, function iterator(key, item) { + obj1[key] = item; + }); + return obj1; + }, + + merge: function merge(obj1, obj2) { + return util.update(util.copy(obj1), obj2); + }, + + copy: function copy(object) { + if (object === null || object === undefined) return object; + var dupe = {}; + // jshint forin:false + for (var key in object) { + dupe[key] = object[key]; + } + return dupe; + }, + + isEmpty: function isEmpty(obj) { + for (var prop in obj) { + if (Object.prototype.hasOwnProperty.call(obj, prop)) { + return false; + } + } + return true; + }, + + arraySliceFn: function arraySliceFn(obj) { + var fn = obj.slice || obj.webkitSlice || obj.mozSlice; + return typeof fn === 'function' ? fn : null; + }, + + isType: function isType(obj, type) { + // handle cross-"frame" objects + if (typeof type === 'function') type = util.typeName(type); + return Object.prototype.toString.call(obj) === '[object ' + type + ']'; + }, + + typeName: function typeName(type) { + if (Object.prototype.hasOwnProperty.call(type, 'name')) return type.name; + var str = type.toString(); + var match = str.match(/^\s*function (.+)\(/); + return match ? match[1] : str; + }, + + error: function error(err, options) { + var originalError = null; + if (typeof err.message === 'string' && err.message !== '') { + if (typeof options === 'string' || (options && options.message)) { + originalError = util.copy(err); + originalError.message = err.message; + } + } + err.message = err.message || null; + + if (typeof options === 'string') { + err.message = options; + } else if (typeof options === 'object' && options !== null) { + util.update(err, options); + if (options.message) + err.message = options.message; + if (options.code || options.name) + err.code = options.code || options.name; + if (options.stack) + err.stack = options.stack; + } + + if (typeof Object.defineProperty === 'function') { + Object.defineProperty(err, 'name', {writable: true, enumerable: false}); + Object.defineProperty(err, 'message', {enumerable: true}); + } + + err.name = String(options && options.name || err.name || err.code || 'Error'); + err.time = new Date(); + + if (originalError) { + err.originalError = originalError; + } + + + for (var key in options || {}) { + if (key[0] === '[' && key[key.length - 1] === ']') { + key = key.slice(1, -1); + if (key === 'code' || key === 'message') { + continue; + } + err['[' + key + ']'] = 'See error.' 
+ key + ' for details.'; + Object.defineProperty(err, key, { + value: err[key] || (options && options[key]) || (originalError && originalError[key]), + enumerable: false, + writable: true + }); + } + } + + return err; + }, + + /** + * @api private + */ + inherit: function inherit(klass, features) { + var newObject = null; + if (features === undefined) { + features = klass; + klass = Object; + newObject = {}; + } else { + var ctor = function ConstructorWrapper() {}; + ctor.prototype = klass.prototype; + newObject = new ctor(); + } + + // constructor not supplied, create pass-through ctor + if (features.constructor === Object) { + features.constructor = function() { + if (klass !== Object) { + return klass.apply(this, arguments); + } + }; + } + + features.constructor.prototype = newObject; + util.update(features.constructor.prototype, features); + features.constructor.__super__ = klass; + return features.constructor; + }, + + /** + * @api private + */ + mixin: function mixin() { + var klass = arguments[0]; + for (var i = 1; i < arguments.length; i++) { + // jshint forin:false + for (var prop in arguments[i].prototype) { + var fn = arguments[i].prototype[prop]; + if (prop !== 'constructor') { + klass.prototype[prop] = fn; + } + } + } + return klass; + }, + + /** + * @api private + */ + hideProperties: function hideProperties(obj, props) { + if (typeof Object.defineProperty !== 'function') return; + + util.arrayEach(props, function (key) { + Object.defineProperty(obj, key, { + enumerable: false, writable: true, configurable: true }); + }); + }, + + /** + * @api private + */ + property: function property(obj, name, value, enumerable, isValue) { + var opts = { + configurable: true, + enumerable: enumerable !== undefined ? enumerable : true + }; + if (typeof value === 'function' && !isValue) { + opts.get = value; + } + else { + opts.value = value; opts.writable = true; + } + + Object.defineProperty(obj, name, opts); + }, + + /** + * @api private + */ + memoizedProperty: function memoizedProperty(obj, name, get, enumerable) { + var cachedValue = null; + + // build enumerable attribute for each value with lazy accessor. + util.property(obj, name, function() { + if (cachedValue === null) { + cachedValue = get(); + } + return cachedValue; + }, enumerable); + }, + + /** + * TODO Remove in major version revision + * This backfill populates response data without the + * top-level payload name. 
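+   * (i.e. the members of a structure-typed payload are mirrored directly
+   * onto `resp.data` itself as non-enumerable properties, alongside the
+   * payload key).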
+ * + * @api private + */ + hoistPayloadMember: function hoistPayloadMember(resp) { + var req = resp.request; + var operationName = req.operation; + var operation = req.service.api.operations[operationName]; + var output = operation.output; + if (output.payload && !operation.hasEventOutput) { + var payloadMember = output.members[output.payload]; + var responsePayload = resp.data[output.payload]; + if (payloadMember.type === 'structure') { + util.each(responsePayload, function(key, value) { + util.property(resp.data, key, value, false); + }); + } + } + }, + + /** + * Compute SHA-256 checksums of streams + * + * @api private + */ + computeSha256: function computeSha256(body, done) { + if (util.isNode()) { + var Stream = util.stream.Stream; + var fs = __nccwpck_require__(57147); + if (typeof Stream === 'function' && body instanceof Stream) { + if (typeof body.path === 'string') { // assume file object + var settings = {}; + if (typeof body.start === 'number') { + settings.start = body.start; + } + if (typeof body.end === 'number') { + settings.end = body.end; + } + body = fs.createReadStream(body.path, settings); + } else { // TODO support other stream types + return done(new Error('Non-file stream objects are ' + + 'not supported with SigV4')); + } + } + } + + util.crypto.sha256(body, 'hex', function(err, sha) { + if (err) done(err); + else done(null, sha); + }); + }, + + /** + * @api private + */ + isClockSkewed: function isClockSkewed(serverTime) { + if (serverTime) { + util.property(AWS.config, 'isClockSkewed', + Math.abs(new Date().getTime() - serverTime) >= 300000, false); + return AWS.config.isClockSkewed; + } + }, + + applyClockOffset: function applyClockOffset(serverTime) { + if (serverTime) + AWS.config.systemClockOffset = serverTime - new Date().getTime(); + }, + + /** + * @api private + */ + extractRequestId: function extractRequestId(resp) { + var requestId = resp.httpResponse.headers['x-amz-request-id'] || + resp.httpResponse.headers['x-amzn-requestid']; + + if (!requestId && resp.data && resp.data.ResponseMetadata) { + requestId = resp.data.ResponseMetadata.RequestId; + } + + if (requestId) { + resp.requestId = requestId; + } + + if (resp.error) { + resp.error.requestId = requestId; + } + }, + + /** + * @api private + */ + addPromises: function addPromises(constructors, PromiseDependency) { + var deletePromises = false; + if (PromiseDependency === undefined && AWS && AWS.config) { + PromiseDependency = AWS.config.getPromisesDependency(); + } + if (PromiseDependency === undefined && typeof Promise !== 'undefined') { + PromiseDependency = Promise; + } + if (typeof PromiseDependency !== 'function') deletePromises = true; + if (!Array.isArray(constructors)) constructors = [constructors]; + + for (var ind = 0; ind < constructors.length; ind++) { + var constructor = constructors[ind]; + if (deletePromises) { + if (constructor.deletePromisesFromClass) { + constructor.deletePromisesFromClass(); + } + } else if (constructor.addPromisesToClass) { + constructor.addPromisesToClass(PromiseDependency); + } + } + }, + + /** + * @api private + * Return a function that will return a promise whose fate is decided by the + * callback behavior of the given method with `methodName`. The method to be + * promisified should conform to node.js convention of accepting a callback as + * last argument and calling that callback with error as the first argument + * and success value on the second argument. 
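+   * @example Promisifying a callback-style method (a minimal sketch;
+   *   `MyClient` and its `send(params, callback)` method are hypothetical)
+   *   MyClient.prototype.sendPromise = AWS.util.promisifyMethod('send', Promise);
+   *   new MyClient().sendPromise(params)
+   *     .then(function(data) { console.log(data); })
+   *     .catch(function(err) { console.error(err); });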
+ */ + promisifyMethod: function promisifyMethod(methodName, PromiseDependency) { + return function promise() { + var self = this; + var args = Array.prototype.slice.call(arguments); + return new PromiseDependency(function(resolve, reject) { + args.push(function(err, data) { + if (err) { + reject(err); + } else { + resolve(data); + } + }); + self[methodName].apply(self, args); + }); + }; + }, + + /** + * @api private + */ + isDualstackAvailable: function isDualstackAvailable(service) { + if (!service) return false; + var metadata = __nccwpck_require__(17752); + if (typeof service !== 'string') service = service.serviceIdentifier; + if (typeof service !== 'string' || !metadata.hasOwnProperty(service)) return false; + return !!metadata[service].dualstackAvailable; + }, + + /** + * @api private + */ + calculateRetryDelay: function calculateRetryDelay(retryCount, retryDelayOptions, err) { + if (!retryDelayOptions) retryDelayOptions = {}; + var customBackoff = retryDelayOptions.customBackoff || null; + if (typeof customBackoff === 'function') { + return customBackoff(retryCount, err); + } + var base = typeof retryDelayOptions.base === 'number' ? retryDelayOptions.base : 100; + var delay = Math.random() * (Math.pow(2, retryCount) * base); + return delay; + }, + + /** + * @api private + */ + handleRequestWithRetries: function handleRequestWithRetries(httpRequest, options, cb) { + if (!options) options = {}; + var http = AWS.HttpClient.getInstance(); + var httpOptions = options.httpOptions || {}; + var retryCount = 0; + + var errCallback = function(err) { + var maxRetries = options.maxRetries || 0; + if (err && err.code === 'TimeoutError') err.retryable = true; + + // Call `calculateRetryDelay()` only when relevant, see #3401 + if (err && err.retryable && retryCount < maxRetries) { + var delay = util.calculateRetryDelay(retryCount, options.retryDelayOptions, err); + if (delay >= 0) { + retryCount++; + setTimeout(sendRequest, delay + (err.retryAfter || 0)); + return; + } + } + cb(err); + }; + + var sendRequest = function() { + var data = ''; + http.handleRequest(httpRequest, httpOptions, function(httpResponse) { + httpResponse.on('data', function(chunk) { data += chunk.toString(); }); + httpResponse.on('end', function() { + var statusCode = httpResponse.statusCode; + if (statusCode < 300) { + cb(null, data); + } else { + var retryAfter = parseInt(httpResponse.headers['retry-after'], 10) * 1000 || 0; + var err = util.error(new Error(), + { + statusCode: statusCode, + retryable: statusCode >= 500 || statusCode === 429 + } + ); + if (retryAfter && err.retryable) err.retryAfter = retryAfter; + errCallback(err); + } + }); + }, errCallback); + }; + + AWS.util.defer(sendRequest); + }, + + /** + * @api private + */ + uuid: { + v4: function uuidV4() { + return (__nccwpck_require__(57821).v4)(); + } + }, + + /** + * @api private + */ + convertPayloadToString: function convertPayloadToString(resp) { + var req = resp.request; + var operation = req.operation; + var rules = req.service.api.operations[operation].output || {}; + if (rules.payload && resp.data[rules.payload]) { + resp.data[rules.payload] = resp.data[rules.payload].toString(); + } + }, + + /** + * @api private + */ + defer: function defer(callback) { + if (typeof process === 'object' && typeof process.nextTick === 'function') { + process.nextTick(callback); + } else if (typeof setImmediate === 'function') { + setImmediate(callback); + } else { + setTimeout(callback, 0); + } + }, + + /** + * @api private + */ + getRequestPayloadShape: function 
getRequestPayloadShape(req) { + var operations = req.service.api.operations; + if (!operations) return undefined; + var operation = (operations || {})[req.operation]; + if (!operation || !operation.input || !operation.input.payload) return undefined; + return operation.input.members[operation.input.payload]; + }, + + getProfilesFromSharedConfig: function getProfilesFromSharedConfig(iniLoader, filename) { + var profiles = {}; + var profilesFromConfig = {}; + if (process.env[util.configOptInEnv]) { + var profilesFromConfig = iniLoader.loadFrom({ + isConfig: true, + filename: process.env[util.sharedConfigFileEnv] + }); + } + var profilesFromCreds= {}; + try { + var profilesFromCreds = iniLoader.loadFrom({ + filename: filename || + (process.env[util.configOptInEnv] && process.env[util.sharedCredentialsFileEnv]) + }); + } catch (error) { + // if using config, assume it is fully descriptive without a credentials file: + if (!process.env[util.configOptInEnv]) throw error; + } + for (var i = 0, profileNames = Object.keys(profilesFromConfig); i < profileNames.length; i++) { + profiles[profileNames[i]] = objectAssign(profiles[profileNames[i]] || {}, profilesFromConfig[profileNames[i]]); + } + for (var i = 0, profileNames = Object.keys(profilesFromCreds); i < profileNames.length; i++) { + profiles[profileNames[i]] = objectAssign(profiles[profileNames[i]] || {}, profilesFromCreds[profileNames[i]]); + } + return profiles; + + /** + * Roughly the semantics of `Object.assign(target, source)` + */ + function objectAssign(target, source) { + for (var i = 0, keys = Object.keys(source); i < keys.length; i++) { + target[keys[i]] = source[keys[i]]; + } + return target; + } + }, + + /** + * @api private + */ + ARN: { + validate: function validateARN(str) { + return str && str.indexOf('arn:') === 0 && str.split(':').length >= 6; + }, + parse: function parseARN(arn) { + var matched = arn.split(':'); + return { + partition: matched[1], + service: matched[2], + region: matched[3], + accountId: matched[4], + resource: matched.slice(5).join(':') + }; + }, + build: function buildARN(arnObject) { + if ( + arnObject.service === undefined || + arnObject.region === undefined || + arnObject.accountId === undefined || + arnObject.resource === undefined + ) throw util.error(new Error('Input ARN object is invalid')); + return 'arn:'+ (arnObject.partition || 'aws') + ':' + arnObject.service + + ':' + arnObject.region + ':' + arnObject.accountId + ':' + arnObject.resource; + } + }, + + /** + * @api private + */ + defaultProfile: 'default', + + /** + * @api private + */ + configOptInEnv: 'AWS_SDK_LOAD_CONFIG', + + /** + * @api private + */ + sharedCredentialsFileEnv: 'AWS_SHARED_CREDENTIALS_FILE', + + /** + * @api private + */ + sharedConfigFileEnv: 'AWS_CONFIG_FILE', + + /** + * @api private + */ + imdsDisabledEnv: 'AWS_EC2_METADATA_DISABLED' +}; + +/** + * @api private + */ +module.exports = util; + + +/***/ }), + +/***/ 23546: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var util = __nccwpck_require__(77985); +var XmlNode = (__nccwpck_require__(20397).XmlNode); +var XmlText = (__nccwpck_require__(90971).XmlText); + +function XmlBuilder() { } + +XmlBuilder.prototype.toXML = function(params, shape, rootElement, noEmpty) { + var xml = new XmlNode(rootElement); + applyNamespaces(xml, shape, true); + serialize(xml, params, shape); + return xml.children.length > 0 || noEmpty ? 
xml.toString() : '';
+};
+
+function serialize(xml, value, shape) {
+  switch (shape.type) {
+    case 'structure': return serializeStructure(xml, value, shape);
+    case 'map': return serializeMap(xml, value, shape);
+    case 'list': return serializeList(xml, value, shape);
+    default: return serializeScalar(xml, value, shape);
+  }
+}
+
+function serializeStructure(xml, params, shape) {
+  util.arrayEach(shape.memberNames, function(memberName) {
+    var memberShape = shape.members[memberName];
+    if (memberShape.location !== 'body') return;
+
+    var value = params[memberName];
+    var name = memberShape.name;
+    if (value !== undefined && value !== null) {
+      if (memberShape.isXmlAttribute) {
+        xml.addAttribute(name, value);
+      } else if (memberShape.flattened) {
+        serialize(xml, value, memberShape);
+      } else {
+        var element = new XmlNode(name);
+        xml.addChildNode(element);
+        applyNamespaces(element, memberShape);
+        serialize(element, value, memberShape);
+      }
+    }
+  });
+}
+
+function serializeMap(xml, map, shape) {
+  var xmlKey = shape.key.name || 'key';
+  var xmlValue = shape.value.name || 'value';
+
+  util.each(map, function(key, value) {
+    var entry = new XmlNode(shape.flattened ? shape.name : 'entry');
+    xml.addChildNode(entry);
+
+    var entryKey = new XmlNode(xmlKey);
+    var entryValue = new XmlNode(xmlValue);
+    entry.addChildNode(entryKey);
+    entry.addChildNode(entryValue);
+
+    serialize(entryKey, key, shape.key);
+    serialize(entryValue, value, shape.value);
+  });
+}
+
+function serializeList(xml, list, shape) {
+  if (shape.flattened) {
+    util.arrayEach(list, function(value) {
+      var name = shape.member.name || shape.name;
+      var element = new XmlNode(name);
+      xml.addChildNode(element);
+      serialize(element, value, shape.member);
+    });
+  } else {
+    util.arrayEach(list, function(value) {
+      var name = shape.member.name || 'member';
+      var element = new XmlNode(name);
+      xml.addChildNode(element);
+      serialize(element, value, shape.member);
+    });
+  }
+}
+
+function serializeScalar(xml, value, shape) {
+  xml.addChildNode(
+    new XmlText(shape.toWireFormat(value))
+  );
+}
+
+function applyNamespaces(xml, shape, isRoot) {
+  var uri, prefix = 'xmlns';
+  if (shape.xmlNamespaceUri) {
+    uri = shape.xmlNamespaceUri;
+    if (shape.xmlNamespacePrefix) prefix += ':' + shape.xmlNamespacePrefix;
+  } else if (isRoot && shape.api.xmlNamespaceUri) {
+    uri = shape.api.xmlNamespaceUri;
+  }
+
+  if (uri) xml.addAttribute(prefix, uri);
+}
+
+/**
+ * @api private
+ */
+module.exports = XmlBuilder;
+
+
+/***/ }),
+
+/***/ 98241:
+/***/ ((module) => {
+
+/**
+ * Escapes characters that can not be in an XML attribute.
+ */
+function escapeAttribute(value) {
+  return value.replace(/&/g, '&amp;').replace(/'/g, '&apos;').replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/"/g, '&quot;');
+}
+
+/**
+ * @api private
+ */
+module.exports = {
+  escapeAttribute: escapeAttribute
+};
+
+
+/***/ }),
+
+/***/ 98464:
+/***/ ((module) => {
+
+/**
+ * Escapes characters that can not be in an XML element.
+ */
+function escapeElement(value) {
+  return value.replace(/&/g, '&amp;')
+              .replace(/</g, '&lt;')
+              .replace(/>/g, '&gt;')
+              .replace(/\r/g, '&#x0D;')
+              .replace(/\n/g, '&#x0A;')
+              .replace(/\u0085/g, '&#x85;')
+              .replace(/\u2028/, '&#x2028;');
+}
+
+/**
+ * @api private
+ */
+module.exports = {
+  escapeElement: escapeElement
+};
+
+
+/***/ }),
+
+/***/ 96752:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var AWS = __nccwpck_require__(28437);
+var util = AWS.util;
+var Shape = AWS.Model.Shape;
+
+var xml2js = __nccwpck_require__(66189);
+
+/**
+ * @api private
+ */
+var options = {  // options passed to xml2js parser
+  explicitCharkey: false, // undocumented
+  trim: false,            // trim the leading/trailing whitespace from text nodes
+  normalize: false,       // trim interior whitespace inside text nodes
+  explicitRoot: false,    // return the root node in the resulting object?
+  emptyTag: null,         // the default value for empty nodes
+  explicitArray: true,    // always put child nodes in an array
+  ignoreAttrs: false,     // ignore attributes, only create text nodes
+  mergeAttrs: false,      // merge attributes and child elements
+  validator: null         // a callable validator
+};
+
+function NodeXmlParser() { }
+
+NodeXmlParser.prototype.parse = function(xml, shape) {
+  shape = shape || {};
+
+  var result = null;
+  var error = null;
+
+  var parser = new xml2js.Parser(options);
+  parser.parseString(xml, function (e, r) {
+    error = e;
+    result = r;
+  });
+
+  if (result) {
+    var data = parseXml(result, shape);
+    if (result.ResponseMetadata) {
+      data.ResponseMetadata = parseXml(result.ResponseMetadata[0], {});
+    }
+    return data;
+  } else if (error) {
+    throw util.error(error, {code: 'XMLParserError', retryable: true});
+  } else { // empty xml document
+    return parseXml({}, shape);
+  }
+};
+
+function parseXml(xml, shape) {
+  switch (shape.type) {
+    case 'structure': return parseStructure(xml, shape);
+    case 'map': return parseMap(xml, shape);
+    case 'list': return parseList(xml, shape);
+    case undefined: case null: return parseUnknown(xml);
+    default: return parseScalar(xml, shape);
+  }
+}
+
+function parseStructure(xml, shape) {
+  var data = {};
+  if (xml === null) return data;
+
+  util.each(shape.members, function(memberName, memberShape) {
+    var xmlName = memberShape.name;
+    if (Object.prototype.hasOwnProperty.call(xml, xmlName) && Array.isArray(xml[xmlName])) {
+      var xmlChild = xml[xmlName];
+      if (!memberShape.flattened) xmlChild = xmlChild[0];
+
+      data[memberName] = parseXml(xmlChild, memberShape);
+    } else if (memberShape.isXmlAttribute &&
+               xml.$ && Object.prototype.hasOwnProperty.call(xml.$, xmlName)) {
+      data[memberName] = parseScalar(xml.$[xmlName], memberShape);
+    } else if (memberShape.type === 'list' && !shape.api.xmlNoDefaultLists) {
+      data[memberName] = memberShape.defaultValue;
+    }
+  });
+
+  return data;
+}
+
+function parseMap(xml, shape) {
+  var data = {};
+  if (xml === null) return data;
+
+  var xmlKey = shape.key.name || 'key';
+  var xmlValue = shape.value.name || 'value';
+  var iterable = shape.flattened ?
xml : xml.entry;
+
+  if (Array.isArray(iterable)) {
+    util.arrayEach(iterable, function(child) {
+      data[child[xmlKey][0]] = parseXml(child[xmlValue][0], shape.value);
+    });
+  }
+
+  return data;
+}
+
+function parseList(xml, shape) {
+  var data = [];
+  var name = shape.member.name || 'member';
+  if (shape.flattened) {
+    util.arrayEach(xml, function(xmlChild) {
+      data.push(parseXml(xmlChild, shape.member));
+    });
+  } else if (xml && Array.isArray(xml[name])) {
+    util.arrayEach(xml[name], function(child) {
+      data.push(parseXml(child, shape.member));
+    });
+  }
+
+  return data;
+}
+
+function parseScalar(text, shape) {
+  if (text && text.$ && text.$.encoding === 'base64') {
+    shape = new Shape.create({type: text.$.encoding});
+  }
+  if (text && text._) text = text._;
+
+  if (typeof shape.toType === 'function') {
+    return shape.toType(text);
+  } else {
+    return text;
+  }
+}
+
+function parseUnknown(xml) {
+  if (xml === undefined || xml === null) return '';
+  if (typeof xml === 'string') return xml;
+
+  // parse a list
+  if (Array.isArray(xml)) {
+    var arr = [];
+    for (i = 0; i < xml.length; i++) {
+      arr.push(parseXml(xml[i], {}));
+    }
+    return arr;
+  }
+
+  // empty object
+  var keys = Object.keys(xml), i;
+  if (keys.length === 0 || (keys.length === 1 && keys[0] === '$')) {
+    return {};
+  }
+
+  // object, parse as structure
+  var data = {};
+  for (i = 0; i < keys.length; i++) {
+    var key = keys[i], value = xml[key];
+    if (key === '$') continue;
+    if (value.length > 1) { // this member is a list
+      data[key] = parseList(value, {member: {}});
+    } else { // this member is a single item
+      data[key] = parseXml(value[0], {});
+    }
+  }
+  return data;
+}
+
+/**
+ * @api private
+ */
+module.exports = NodeXmlParser;
+
+
+/***/ }),
+
+/***/ 20397:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var escapeAttribute = (__nccwpck_require__(98241).escapeAttribute);
+
+/**
+ * Represents an XML node.
+ * @api private
+ */
+function XmlNode(name, children) {
+  if (children === void 0) { children = []; }
+  this.name = name;
+  this.children = children;
+  this.attributes = {};
+}
+XmlNode.prototype.addAttribute = function (name, value) {
+  this.attributes[name] = value;
+  return this;
+};
+XmlNode.prototype.addChildNode = function (child) {
+  this.children.push(child);
+  return this;
+};
+XmlNode.prototype.removeAttribute = function (name) {
+  delete this.attributes[name];
+  return this;
+};
+XmlNode.prototype.toString = function () {
+  var hasChildren = Boolean(this.children.length);
+  var xmlText = '<' + this.name;
+  // add attributes
+  var attributes = this.attributes;
+  for (var i = 0, attributeNames = Object.keys(attributes); i < attributeNames.length; i++) {
+    var attributeName = attributeNames[i];
+    var attribute = attributes[attributeName];
+    if (typeof attribute !== 'undefined' && attribute !== null) {
+      xmlText += ' ' + attributeName + '=\"' + escapeAttribute('' + attribute) + '\"';
+    }
+  }
+  return xmlText += !hasChildren ? '/>' : '>' + this.children.map(function (c) { return c.toString(); }).join('') + '</' + this.name + '>';
+};
+
+/**
+ * @api private
+ */
+module.exports = {
+  XmlNode: XmlNode
+};
+
+
+/***/ }),
+
+/***/ 90971:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var escapeElement = (__nccwpck_require__(98464).escapeElement);
+
+/**
+ * Represents an XML text value.
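+ * The wrapped value is escaped with {escapeElement} when the node is
+ * serialized via toString().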
+ * @api private + */ +function XmlText(value) { + this.value = value; +} + +XmlText.prototype.toString = function () { + return escapeElement('' + this.value); +}; + +/** + * @api private + */ +module.exports = { + XmlText: XmlText +}; + + +/***/ }), + +/***/ 35827: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; + +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + + return [bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]]].join(''); +} + +var _default = bytesToUuid; +exports["default"] = _default; + +/***/ }), + +/***/ 57821: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +var __webpack_unused_export__; + + +__webpack_unused_export__ = ({ + value: true +}); +__webpack_unused_export__ = ({ + enumerable: true, + get: function () { + return _v.default; + } +}); +__webpack_unused_export__ = ({ + enumerable: true, + get: function () { + return _v2.default; + } +}); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +__webpack_unused_export__ = ({ + enumerable: true, + get: function () { + return _v4.default; + } +}); + +var _v = _interopRequireDefault(__nccwpck_require__(67668)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(98573)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(7811)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(46508)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 93525: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 49788: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function rng() { + return _crypto.default.randomBytes(16); +} + +/***/ }), + +/***/ 7387: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 67668: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(49788)); + +var _bytesToUuid = _interopRequireDefault(__nccwpck_require__(35827)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +var _nodeId; + +var _clockseq; // Previous uuid creation time + + +var _lastMSecs = 0; +var _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || []; + options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + var seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + var dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + var tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf ? buf : (0, _bytesToUuid.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 98573: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(36097)); + +var _md = _interopRequireDefault(__nccwpck_require__(93525)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 36097: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _bytesToUuid = _interopRequireDefault(__nccwpck_require__(35827)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function uuidToBytes(uuid) { + // Note: We assume we're being passed a valid uuid string + var bytes = []; + uuid.replace(/[a-fA-F0-9]{2}/g, function (hex) { + bytes.push(parseInt(hex, 16)); + }); + return bytes; +} + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + var bytes = new Array(str.length); + + for (var i = 0; i < str.length; i++) { + bytes[i] = str.charCodeAt(i); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + var generateUUID = function (value, namespace, buf, offset) { + var off = buf && offset || 0; + if (typeof value == 'string') value = stringToBytes(value); + if (typeof namespace == 'string') namespace = uuidToBytes(namespace); + if (!Array.isArray(value)) throw TypeError('value must be an array of bytes'); + if (!Array.isArray(namespace) || namespace.length !== 16) throw TypeError('namespace must be uuid string or an Array of 16 byte values'); // Per 4.3 + + var bytes = hashfunc(namespace.concat(value)); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + for (var idx = 0; idx < 16; ++idx) { + buf[off + idx] = bytes[idx]; + } + } + + return buf || (0, _bytesToUuid.default)(bytes); + }; // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 7811: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(49788)); + +var _bytesToUuid = _interopRequireDefault(__nccwpck_require__(35827)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof options == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; + } + + options = options || {}; + + var rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || (0, _bytesToUuid.default)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 46508: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(36097)); + +var _sha = _interopRequireDefault(__nccwpck_require__(7387)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 96323: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; +var __webpack_unused_export__; + +__webpack_unused_export__ = ({ value: true }); +var LRU_1 = __nccwpck_require__(77710); +var CACHE_SIZE = 1000; +/** + * Inspired node-lru-cache[https://github.com/isaacs/node-lru-cache] + */ +var EndpointCache = /** @class */ (function () { + function EndpointCache(maxSize) { + if (maxSize === void 0) { maxSize = CACHE_SIZE; } + this.maxSize = maxSize; + this.cache = new LRU_1.LRUCache(maxSize); + } + ; + Object.defineProperty(EndpointCache.prototype, "size", { + get: function () { + return this.cache.length; + }, + enumerable: true, + configurable: true + }); + EndpointCache.prototype.put = function (key, value) { + var keyString = typeof key !== 'string' ? EndpointCache.getKeyString(key) : key; + var endpointRecord = this.populateValue(value); + this.cache.put(keyString, endpointRecord); + }; + EndpointCache.prototype.get = function (key) { + var keyString = typeof key !== 'string' ? EndpointCache.getKeyString(key) : key; + var now = Date.now(); + var records = this.cache.get(keyString); + if (records) { + for (var i = records.length-1; i >= 0; i--) { + var record = records[i]; + if (record.Expire < now) { + records.splice(i, 1); + } + } + if (records.length === 0) { + this.cache.remove(keyString); + return undefined; + } + } + return records; + }; + EndpointCache.getKeyString = function (key) { + var identifiers = []; + var identifierNames = Object.keys(key).sort(); + for (var i = 0; i < identifierNames.length; i++) { + var identifierName = identifierNames[i]; + if (key[identifierName] === undefined) + continue; + identifiers.push(key[identifierName]); + } + return identifiers.join(' '); + }; + EndpointCache.prototype.populateValue = function (endpoints) { + var now = Date.now(); + return endpoints.map(function (endpoint) { return ({ + Address: endpoint.Address || '', + Expire: now + (endpoint.CachePeriodInMinutes || 1) * 60 * 1000 + }); }); + }; + EndpointCache.prototype.empty = function () { + this.cache.empty(); + }; + EndpointCache.prototype.remove = function (key) { + var keyString = typeof key !== 'string' ? 
EndpointCache.getKeyString(key) : key; + this.cache.remove(keyString); + }; + return EndpointCache; +}()); +exports.$ = EndpointCache; + +/***/ }), + +/***/ 77710: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +var LinkedListNode = /** @class */ (function () { + function LinkedListNode(key, value) { + this.key = key; + this.value = value; + } + return LinkedListNode; +}()); +var LRUCache = /** @class */ (function () { + function LRUCache(size) { + this.nodeMap = {}; + this.size = 0; + if (typeof size !== 'number' || size < 1) { + throw new Error('Cache size can only be positive number'); + } + this.sizeLimit = size; + } + Object.defineProperty(LRUCache.prototype, "length", { + get: function () { + return this.size; + }, + enumerable: true, + configurable: true + }); + LRUCache.prototype.prependToList = function (node) { + if (!this.headerNode) { + this.tailNode = node; + } + else { + this.headerNode.prev = node; + node.next = this.headerNode; + } + this.headerNode = node; + this.size++; + }; + LRUCache.prototype.removeFromTail = function () { + if (!this.tailNode) { + return undefined; + } + var node = this.tailNode; + var prevNode = node.prev; + if (prevNode) { + prevNode.next = undefined; + } + node.prev = undefined; + this.tailNode = prevNode; + this.size--; + return node; + }; + LRUCache.prototype.detachFromList = function (node) { + if (this.headerNode === node) { + this.headerNode = node.next; + } + if (this.tailNode === node) { + this.tailNode = node.prev; + } + if (node.prev) { + node.prev.next = node.next; + } + if (node.next) { + node.next.prev = node.prev; + } + node.next = undefined; + node.prev = undefined; + this.size--; + }; + LRUCache.prototype.get = function (key) { + if (this.nodeMap[key]) { + var node = this.nodeMap[key]; + this.detachFromList(node); + this.prependToList(node); + return node.value; + } + }; + LRUCache.prototype.remove = function (key) { + if (this.nodeMap[key]) { + var node = this.nodeMap[key]; + this.detachFromList(node); + delete this.nodeMap[key]; + } + }; + LRUCache.prototype.put = function (key, value) { + if (this.nodeMap[key]) { + this.remove(key); + } + else if (this.size === this.sizeLimit) { + var tailNode = this.removeFromTail(); + var key_1 = tailNode.key; + delete this.nodeMap[key_1]; + } + var newNode = new LinkedListNode(key, value); + this.nodeMap[key] = newNode; + this.prependToList(newNode); + }; + LRUCache.prototype.empty = function () { + var keys = Object.keys(this.nodeMap); + for (var i = 0; i < keys.length; i++) { + var key = keys[i]; + var node = this.nodeMap[key]; + this.detachFromList(node); + delete this.nodeMap[key]; + } + }; + return LRUCache; +}()); +exports.LRUCache = LRUCache; + +/***/ }), + +/***/ 83682: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var register = __nccwpck_require__(44670); +var addHook = __nccwpck_require__(5549); +var removeHook = __nccwpck_require__(6819); + +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind; +var bindable = bind.bind(bind); + +function bindApi(hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply( + null, + name ? [state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach(function (kind) { + var args = name ? 
[state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); +} + +function HookSingular() { + var singularHookName = "h"; + var singularHookState = { + registry: {}, + }; + var singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; +} + +function HookCollection() { + var state = { + registry: {}, + }; + + var hook = register.bind(null, state); + bindApi(hook, state); + + return hook; +} + +var collectionHookDeprecationMessageDisplayed = false; +function Hook() { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn( + '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' + ); + collectionHookDeprecationMessageDisplayed = true; + } + return HookCollection(); +} + +Hook.Singular = HookSingular.bind(); +Hook.Collection = HookCollection.bind(); + +module.exports = Hook; +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook; +module.exports.Singular = Hook.Singular; +module.exports.Collection = Hook.Collection; + + +/***/ }), + +/***/ 5549: +/***/ ((module) => { + +module.exports = addHook; + +function addHook(state, kind, name, hook) { + var orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; + } + + if (kind === "before") { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)); + }; + } + + if (kind === "after") { + hook = function (method, options) { + var result; + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_; + return orig(result, options); + }) + .then(function () { + return result; + }); + }; + } + + if (kind === "error") { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options); + }); + }; + } + + state.registry[name].push({ + hook: hook, + orig: orig, + }); +} + + +/***/ }), + +/***/ 44670: +/***/ ((module) => { + +module.exports = register; + +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); + } + + if (!options) { + options = {}; + } + + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options); + }, method)(); + } + + return Promise.resolve().then(function () { + if (!state.registry[name]) { + return method(options); + } + + return state.registry[name].reduce(function (method, registered) { + return registered.hook.bind(null, method, options); + }, method)(); + }); +} + + +/***/ }), + +/***/ 6819: +/***/ ((module) => { + +module.exports = removeHook; + +function removeHook(state, name, method) { + if (!state.registry[name]) { + return; + } + + var index = state.registry[name] + .map(function (registered) { + return registered.orig; + }) + .indexOf(method); + + if (index === -1) { + return; + } + + state.registry[name].splice(index, 1); +} + + +/***/ }), + +/***/ 58932: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* 
istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; + + +/***/ }), + +/***/ 87783: +/***/ ((__unused_webpack_module, exports) => { + +(function(exports) { + "use strict"; + + function isArray(obj) { + if (obj !== null) { + return Object.prototype.toString.call(obj) === "[object Array]"; + } else { + return false; + } + } + + function isObject(obj) { + if (obj !== null) { + return Object.prototype.toString.call(obj) === "[object Object]"; + } else { + return false; + } + } + + function strictDeepEqual(first, second) { + // Check the scalar case first. + if (first === second) { + return true; + } + + // Check if they are the same type. + var firstType = Object.prototype.toString.call(first); + if (firstType !== Object.prototype.toString.call(second)) { + return false; + } + // We know that first and second have the same type so we can just check the + // first type from now on. + if (isArray(first) === true) { + // Short circuit if they're not the same length; + if (first.length !== second.length) { + return false; + } + for (var i = 0; i < first.length; i++) { + if (strictDeepEqual(first[i], second[i]) === false) { + return false; + } + } + return true; + } + if (isObject(first) === true) { + // An object is equal if it has the same key/value pairs. + var keysSeen = {}; + for (var key in first) { + if (hasOwnProperty.call(first, key)) { + if (strictDeepEqual(first[key], second[key]) === false) { + return false; + } + keysSeen[key] = true; + } + } + // Now check that there aren't any keys in second that weren't + // in first. + for (var key2 in second) { + if (hasOwnProperty.call(second, key2)) { + if (keysSeen[key2] !== true) { + return false; + } + } + } + return true; + } + return false; + } + + function isFalse(obj) { + // From the spec: + // A false value corresponds to the following values: + // Empty list + // Empty object + // Empty string + // False boolean + // null value + + // First check the scalar values. + if (obj === "" || obj === false || obj === null) { + return true; + } else if (isArray(obj) && obj.length === 0) { + // Check for an empty array. + return true; + } else if (isObject(obj)) { + // Check for an empty object. + for (var key in obj) { + // If there are any keys, then + // the object is not empty so the object + // is not false. + if (obj.hasOwnProperty(key)) { + return false; + } + } + return true; + } else { + return false; + } + } + + function objValues(obj) { + var keys = Object.keys(obj); + var values = []; + for (var i = 0; i < keys.length; i++) { + values.push(obj[keys[i]]); + } + return values; + } + + function merge(a, b) { + var merged = {}; + for (var key in a) { + merged[key] = a[key]; + } + for (var key2 in b) { + merged[key2] = b[key2]; + } + return merged; + } + + var trimLeft; + if (typeof String.prototype.trimLeft === "function") { + trimLeft = function(str) { + return str.trimLeft(); + }; + } else { + trimLeft = function(str) { + return str.match(/^\s*(.*)/)[1]; + }; + } + + // Type constants used to define functions. 
+ var TYPE_NUMBER = 0; + var TYPE_ANY = 1; + var TYPE_STRING = 2; + var TYPE_ARRAY = 3; + var TYPE_OBJECT = 4; + var TYPE_BOOLEAN = 5; + var TYPE_EXPREF = 6; + var TYPE_NULL = 7; + var TYPE_ARRAY_NUMBER = 8; + var TYPE_ARRAY_STRING = 9; + var TYPE_NAME_TABLE = { + 0: 'number', + 1: 'any', + 2: 'string', + 3: 'array', + 4: 'object', + 5: 'boolean', + 6: 'expression', + 7: 'null', + 8: 'Array', + 9: 'Array' + }; + + var TOK_EOF = "EOF"; + var TOK_UNQUOTEDIDENTIFIER = "UnquotedIdentifier"; + var TOK_QUOTEDIDENTIFIER = "QuotedIdentifier"; + var TOK_RBRACKET = "Rbracket"; + var TOK_RPAREN = "Rparen"; + var TOK_COMMA = "Comma"; + var TOK_COLON = "Colon"; + var TOK_RBRACE = "Rbrace"; + var TOK_NUMBER = "Number"; + var TOK_CURRENT = "Current"; + var TOK_EXPREF = "Expref"; + var TOK_PIPE = "Pipe"; + var TOK_OR = "Or"; + var TOK_AND = "And"; + var TOK_EQ = "EQ"; + var TOK_GT = "GT"; + var TOK_LT = "LT"; + var TOK_GTE = "GTE"; + var TOK_LTE = "LTE"; + var TOK_NE = "NE"; + var TOK_FLATTEN = "Flatten"; + var TOK_STAR = "Star"; + var TOK_FILTER = "Filter"; + var TOK_DOT = "Dot"; + var TOK_NOT = "Not"; + var TOK_LBRACE = "Lbrace"; + var TOK_LBRACKET = "Lbracket"; + var TOK_LPAREN= "Lparen"; + var TOK_LITERAL= "Literal"; + + // The "&", "[", "<", ">" tokens + // are not in basicToken because + // there are two token variants + // ("&&", "[?", "<=", ">="). This is specially handled + // below. + + var basicTokens = { + ".": TOK_DOT, + "*": TOK_STAR, + ",": TOK_COMMA, + ":": TOK_COLON, + "{": TOK_LBRACE, + "}": TOK_RBRACE, + "]": TOK_RBRACKET, + "(": TOK_LPAREN, + ")": TOK_RPAREN, + "@": TOK_CURRENT + }; + + var operatorStartToken = { + "<": true, + ">": true, + "=": true, + "!": true + }; + + var skipChars = { + " ": true, + "\t": true, + "\n": true + }; + + + function isAlpha(ch) { + return (ch >= "a" && ch <= "z") || + (ch >= "A" && ch <= "Z") || + ch === "_"; + } + + function isNum(ch) { + return (ch >= "0" && ch <= "9") || + ch === "-"; + } + function isAlphaNum(ch) { + return (ch >= "a" && ch <= "z") || + (ch >= "A" && ch <= "Z") || + (ch >= "0" && ch <= "9") || + ch === "_"; + } + + function Lexer() { + } + Lexer.prototype = { + tokenize: function(stream) { + var tokens = []; + this._current = 0; + var start; + var identifier; + var token; + while (this._current < stream.length) { + if (isAlpha(stream[this._current])) { + start = this._current; + identifier = this._consumeUnquotedIdentifier(stream); + tokens.push({type: TOK_UNQUOTEDIDENTIFIER, + value: identifier, + start: start}); + } else if (basicTokens[stream[this._current]] !== undefined) { + tokens.push({type: basicTokens[stream[this._current]], + value: stream[this._current], + start: this._current}); + this._current++; + } else if (isNum(stream[this._current])) { + token = this._consumeNumber(stream); + tokens.push(token); + } else if (stream[this._current] === "[") { + // No need to increment this._current. 
This happens + // in _consumeLBracket + token = this._consumeLBracket(stream); + tokens.push(token); + } else if (stream[this._current] === "\"") { + start = this._current; + identifier = this._consumeQuotedIdentifier(stream); + tokens.push({type: TOK_QUOTEDIDENTIFIER, + value: identifier, + start: start}); + } else if (stream[this._current] === "'") { + start = this._current; + identifier = this._consumeRawStringLiteral(stream); + tokens.push({type: TOK_LITERAL, + value: identifier, + start: start}); + } else if (stream[this._current] === "`") { + start = this._current; + var literal = this._consumeLiteral(stream); + tokens.push({type: TOK_LITERAL, + value: literal, + start: start}); + } else if (operatorStartToken[stream[this._current]] !== undefined) { + tokens.push(this._consumeOperator(stream)); + } else if (skipChars[stream[this._current]] !== undefined) { + // Ignore whitespace. + this._current++; + } else if (stream[this._current] === "&") { + start = this._current; + this._current++; + if (stream[this._current] === "&") { + this._current++; + tokens.push({type: TOK_AND, value: "&&", start: start}); + } else { + tokens.push({type: TOK_EXPREF, value: "&", start: start}); + } + } else if (stream[this._current] === "|") { + start = this._current; + this._current++; + if (stream[this._current] === "|") { + this._current++; + tokens.push({type: TOK_OR, value: "||", start: start}); + } else { + tokens.push({type: TOK_PIPE, value: "|", start: start}); + } + } else { + var error = new Error("Unknown character:" + stream[this._current]); + error.name = "LexerError"; + throw error; + } + } + return tokens; + }, + + _consumeUnquotedIdentifier: function(stream) { + var start = this._current; + this._current++; + while (this._current < stream.length && isAlphaNum(stream[this._current])) { + this._current++; + } + return stream.slice(start, this._current); + }, + + _consumeQuotedIdentifier: function(stream) { + var start = this._current; + this._current++; + var maxLength = stream.length; + while (stream[this._current] !== "\"" && this._current < maxLength) { + // You can escape a double quote and you can escape an escape. + var current = this._current; + if (stream[current] === "\\" && (stream[current + 1] === "\\" || + stream[current + 1] === "\"")) { + current += 2; + } else { + current++; + } + this._current = current; + } + this._current++; + return JSON.parse(stream.slice(start, this._current)); + }, + + _consumeRawStringLiteral: function(stream) { + var start = this._current; + this._current++; + var maxLength = stream.length; + while (stream[this._current] !== "'" && this._current < maxLength) { + // You can escape a single quote and you can escape an escape. 
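+        // For example, the input 'it\'s' lexes to the raw string
+        // literal it's (illustrative input).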
+ var current = this._current; + if (stream[current] === "\\" && (stream[current + 1] === "\\" || + stream[current + 1] === "'")) { + current += 2; + } else { + current++; + } + this._current = current; + } + this._current++; + var literal = stream.slice(start + 1, this._current - 1); + return literal.replace("\\'", "'"); + }, + + _consumeNumber: function(stream) { + var start = this._current; + this._current++; + var maxLength = stream.length; + while (isNum(stream[this._current]) && this._current < maxLength) { + this._current++; + } + var value = parseInt(stream.slice(start, this._current)); + return {type: TOK_NUMBER, value: value, start: start}; + }, + + _consumeLBracket: function(stream) { + var start = this._current; + this._current++; + if (stream[this._current] === "?") { + this._current++; + return {type: TOK_FILTER, value: "[?", start: start}; + } else if (stream[this._current] === "]") { + this._current++; + return {type: TOK_FLATTEN, value: "[]", start: start}; + } else { + return {type: TOK_LBRACKET, value: "[", start: start}; + } + }, + + _consumeOperator: function(stream) { + var start = this._current; + var startingChar = stream[start]; + this._current++; + if (startingChar === "!") { + if (stream[this._current] === "=") { + this._current++; + return {type: TOK_NE, value: "!=", start: start}; + } else { + return {type: TOK_NOT, value: "!", start: start}; + } + } else if (startingChar === "<") { + if (stream[this._current] === "=") { + this._current++; + return {type: TOK_LTE, value: "<=", start: start}; + } else { + return {type: TOK_LT, value: "<", start: start}; + } + } else if (startingChar === ">") { + if (stream[this._current] === "=") { + this._current++; + return {type: TOK_GTE, value: ">=", start: start}; + } else { + return {type: TOK_GT, value: ">", start: start}; + } + } else if (startingChar === "=") { + if (stream[this._current] === "=") { + this._current++; + return {type: TOK_EQ, value: "==", start: start}; + } + } + }, + + _consumeLiteral: function(stream) { + this._current++; + var start = this._current; + var maxLength = stream.length; + var literal; + while(stream[this._current] !== "`" && this._current < maxLength) { + // You can escape a literal char or you can escape the escape. + var current = this._current; + if (stream[current] === "\\" && (stream[current + 1] === "\\" || + stream[current + 1] === "`")) { + current += 2; + } else { + current++; + } + this._current = current; + } + var literalString = trimLeft(stream.slice(start, this._current)); + literalString = literalString.replace("\\`", "`"); + if (this._looksLikeJSON(literalString)) { + literal = JSON.parse(literalString); + } else { + // Try to JSON parse it as "" + literal = JSON.parse("\"" + literalString + "\""); + } + // +1 gets us to the ending "`", +1 to move on to the next char. 
+ this._current++; + return literal; + }, + + _looksLikeJSON: function(literalString) { + var startingChars = "[{\""; + var jsonLiterals = ["true", "false", "null"]; + var numberLooking = "-0123456789"; + + if (literalString === "") { + return false; + } else if (startingChars.indexOf(literalString[0]) >= 0) { + return true; + } else if (jsonLiterals.indexOf(literalString) >= 0) { + return true; + } else if (numberLooking.indexOf(literalString[0]) >= 0) { + try { + JSON.parse(literalString); + return true; + } catch (ex) { + return false; + } + } else { + return false; + } + } + }; + + var bindingPower = {}; + bindingPower[TOK_EOF] = 0; + bindingPower[TOK_UNQUOTEDIDENTIFIER] = 0; + bindingPower[TOK_QUOTEDIDENTIFIER] = 0; + bindingPower[TOK_RBRACKET] = 0; + bindingPower[TOK_RPAREN] = 0; + bindingPower[TOK_COMMA] = 0; + bindingPower[TOK_RBRACE] = 0; + bindingPower[TOK_NUMBER] = 0; + bindingPower[TOK_CURRENT] = 0; + bindingPower[TOK_EXPREF] = 0; + bindingPower[TOK_PIPE] = 1; + bindingPower[TOK_OR] = 2; + bindingPower[TOK_AND] = 3; + bindingPower[TOK_EQ] = 5; + bindingPower[TOK_GT] = 5; + bindingPower[TOK_LT] = 5; + bindingPower[TOK_GTE] = 5; + bindingPower[TOK_LTE] = 5; + bindingPower[TOK_NE] = 5; + bindingPower[TOK_FLATTEN] = 9; + bindingPower[TOK_STAR] = 20; + bindingPower[TOK_FILTER] = 21; + bindingPower[TOK_DOT] = 40; + bindingPower[TOK_NOT] = 45; + bindingPower[TOK_LBRACE] = 50; + bindingPower[TOK_LBRACKET] = 55; + bindingPower[TOK_LPAREN] = 60; + + function Parser() { + } + + Parser.prototype = { + parse: function(expression) { + this._loadTokens(expression); + this.index = 0; + var ast = this.expression(0); + if (this._lookahead(0) !== TOK_EOF) { + var t = this._lookaheadToken(0); + var error = new Error( + "Unexpected token type: " + t.type + ", value: " + t.value); + error.name = "ParserError"; + throw error; + } + return ast; + }, + + _loadTokens: function(expression) { + var lexer = new Lexer(); + var tokens = lexer.tokenize(expression); + tokens.push({type: TOK_EOF, value: "", start: expression.length}); + this.tokens = tokens; + }, + + expression: function(rbp) { + var leftToken = this._lookaheadToken(0); + this._advance(); + var left = this.nud(leftToken); + var currentToken = this._lookahead(0); + while (rbp < bindingPower[currentToken]) { + this._advance(); + left = this.led(currentToken, left); + currentToken = this._lookahead(0); + } + return left; + }, + + _lookahead: function(number) { + return this.tokens[this.index + number].type; + }, + + _lookaheadToken: function(number) { + return this.tokens[this.index + number]; + }, + + _advance: function() { + this.index++; + }, + + nud: function(token) { + var left; + var right; + var expression; + switch (token.type) { + case TOK_LITERAL: + return {type: "Literal", value: token.value}; + case TOK_UNQUOTEDIDENTIFIER: + return {type: "Field", name: token.value}; + case TOK_QUOTEDIDENTIFIER: + var node = {type: "Field", name: token.value}; + if (this._lookahead(0) === TOK_LPAREN) { + throw new Error("Quoted identifier not allowed for function names."); + } + return node; + case TOK_NOT: + right = this.expression(bindingPower.Not); + return {type: "NotExpression", children: [right]}; + case TOK_STAR: + left = {type: "Identity"}; + right = null; + if (this._lookahead(0) === TOK_RBRACKET) { + // This can happen in a multiselect, + // [a, b, *] + right = {type: "Identity"}; + } else { + right = this._parseProjectionRHS(bindingPower.Star); + } + return {type: "ValueProjection", children: [left, right]}; + case TOK_FILTER: + return 
this.led(token.type, {type: "Identity"}); + case TOK_LBRACE: + return this._parseMultiselectHash(); + case TOK_FLATTEN: + left = {type: TOK_FLATTEN, children: [{type: "Identity"}]}; + right = this._parseProjectionRHS(bindingPower.Flatten); + return {type: "Projection", children: [left, right]}; + case TOK_LBRACKET: + if (this._lookahead(0) === TOK_NUMBER || this._lookahead(0) === TOK_COLON) { + right = this._parseIndexExpression(); + return this._projectIfSlice({type: "Identity"}, right); + } else if (this._lookahead(0) === TOK_STAR && + this._lookahead(1) === TOK_RBRACKET) { + this._advance(); + this._advance(); + right = this._parseProjectionRHS(bindingPower.Star); + return {type: "Projection", + children: [{type: "Identity"}, right]}; + } + return this._parseMultiselectList(); + case TOK_CURRENT: + return {type: TOK_CURRENT}; + case TOK_EXPREF: + expression = this.expression(bindingPower.Expref); + return {type: "ExpressionReference", children: [expression]}; + case TOK_LPAREN: + var args = []; + while (this._lookahead(0) !== TOK_RPAREN) { + if (this._lookahead(0) === TOK_CURRENT) { + expression = {type: TOK_CURRENT}; + this._advance(); + } else { + expression = this.expression(0); + } + args.push(expression); + } + this._match(TOK_RPAREN); + return args[0]; + default: + this._errorToken(token); + } + }, + + led: function(tokenName, left) { + var right; + switch(tokenName) { + case TOK_DOT: + var rbp = bindingPower.Dot; + if (this._lookahead(0) !== TOK_STAR) { + right = this._parseDotRHS(rbp); + return {type: "Subexpression", children: [left, right]}; + } + // Creating a projection. + this._advance(); + right = this._parseProjectionRHS(rbp); + return {type: "ValueProjection", children: [left, right]}; + case TOK_PIPE: + right = this.expression(bindingPower.Pipe); + return {type: TOK_PIPE, children: [left, right]}; + case TOK_OR: + right = this.expression(bindingPower.Or); + return {type: "OrExpression", children: [left, right]}; + case TOK_AND: + right = this.expression(bindingPower.And); + return {type: "AndExpression", children: [left, right]}; + case TOK_LPAREN: + var name = left.name; + var args = []; + var expression, node; + while (this._lookahead(0) !== TOK_RPAREN) { + if (this._lookahead(0) === TOK_CURRENT) { + expression = {type: TOK_CURRENT}; + this._advance(); + } else { + expression = this.expression(0); + } + if (this._lookahead(0) === TOK_COMMA) { + this._match(TOK_COMMA); + } + args.push(expression); + } + this._match(TOK_RPAREN); + node = {type: "Function", name: name, children: args}; + return node; + case TOK_FILTER: + var condition = this.expression(0); + this._match(TOK_RBRACKET); + if (this._lookahead(0) === TOK_FLATTEN) { + right = {type: "Identity"}; + } else { + right = this._parseProjectionRHS(bindingPower.Filter); + } + return {type: "FilterProjection", children: [left, right, condition]}; + case TOK_FLATTEN: + var leftNode = {type: TOK_FLATTEN, children: [left]}; + var rightNode = this._parseProjectionRHS(bindingPower.Flatten); + return {type: "Projection", children: [leftNode, rightNode]}; + case TOK_EQ: + case TOK_NE: + case TOK_GT: + case TOK_GTE: + case TOK_LT: + case TOK_LTE: + return this._parseComparator(left, tokenName); + case TOK_LBRACKET: + var token = this._lookaheadToken(0); + if (token.type === TOK_NUMBER || token.type === TOK_COLON) { + right = this._parseIndexExpression(); + return this._projectIfSlice(left, right); + } + this._match(TOK_STAR); + this._match(TOK_RBRACKET); + right = this._parseProjectionRHS(bindingPower.Star); + return {type: 
"Projection", children: [left, right]}; + default: + this._errorToken(this._lookaheadToken(0)); + } + }, + + _match: function(tokenType) { + if (this._lookahead(0) === tokenType) { + this._advance(); + } else { + var t = this._lookaheadToken(0); + var error = new Error("Expected " + tokenType + ", got: " + t.type); + error.name = "ParserError"; + throw error; + } + }, + + _errorToken: function(token) { + var error = new Error("Invalid token (" + + token.type + "): \"" + + token.value + "\""); + error.name = "ParserError"; + throw error; + }, + + + _parseIndexExpression: function() { + if (this._lookahead(0) === TOK_COLON || this._lookahead(1) === TOK_COLON) { + return this._parseSliceExpression(); + } else { + var node = { + type: "Index", + value: this._lookaheadToken(0).value}; + this._advance(); + this._match(TOK_RBRACKET); + return node; + } + }, + + _projectIfSlice: function(left, right) { + var indexExpr = {type: "IndexExpression", children: [left, right]}; + if (right.type === "Slice") { + return { + type: "Projection", + children: [indexExpr, this._parseProjectionRHS(bindingPower.Star)] + }; + } else { + return indexExpr; + } + }, + + _parseSliceExpression: function() { + // [start:end:step] where each part is optional, as well as the last + // colon. + var parts = [null, null, null]; + var index = 0; + var currentToken = this._lookahead(0); + while (currentToken !== TOK_RBRACKET && index < 3) { + if (currentToken === TOK_COLON) { + index++; + this._advance(); + } else if (currentToken === TOK_NUMBER) { + parts[index] = this._lookaheadToken(0).value; + this._advance(); + } else { + var t = this._lookahead(0); + var error = new Error("Syntax error, unexpected token: " + + t.value + "(" + t.type + ")"); + error.name = "Parsererror"; + throw error; + } + currentToken = this._lookahead(0); + } + this._match(TOK_RBRACKET); + return { + type: "Slice", + children: parts + }; + }, + + _parseComparator: function(left, comparator) { + var right = this.expression(bindingPower[comparator]); + return {type: "Comparator", name: comparator, children: [left, right]}; + }, + + _parseDotRHS: function(rbp) { + var lookahead = this._lookahead(0); + var exprTokens = [TOK_UNQUOTEDIDENTIFIER, TOK_QUOTEDIDENTIFIER, TOK_STAR]; + if (exprTokens.indexOf(lookahead) >= 0) { + return this.expression(rbp); + } else if (lookahead === TOK_LBRACKET) { + this._match(TOK_LBRACKET); + return this._parseMultiselectList(); + } else if (lookahead === TOK_LBRACE) { + this._match(TOK_LBRACE); + return this._parseMultiselectHash(); + } + }, + + _parseProjectionRHS: function(rbp) { + var right; + if (bindingPower[this._lookahead(0)] < 10) { + right = {type: "Identity"}; + } else if (this._lookahead(0) === TOK_LBRACKET) { + right = this.expression(rbp); + } else if (this._lookahead(0) === TOK_FILTER) { + right = this.expression(rbp); + } else if (this._lookahead(0) === TOK_DOT) { + this._match(TOK_DOT); + right = this._parseDotRHS(rbp); + } else { + var t = this._lookaheadToken(0); + var error = new Error("Sytanx error, unexpected token: " + + t.value + "(" + t.type + ")"); + error.name = "ParserError"; + throw error; + } + return right; + }, + + _parseMultiselectList: function() { + var expressions = []; + while (this._lookahead(0) !== TOK_RBRACKET) { + var expression = this.expression(0); + expressions.push(expression); + if (this._lookahead(0) === TOK_COMMA) { + this._match(TOK_COMMA); + if (this._lookahead(0) === TOK_RBRACKET) { + throw new Error("Unexpected token Rbracket"); + } + } + } + this._match(TOK_RBRACKET); + 
return {type: "MultiSelectList", children: expressions}; + }, + + _parseMultiselectHash: function() { + var pairs = []; + var identifierTypes = [TOK_UNQUOTEDIDENTIFIER, TOK_QUOTEDIDENTIFIER]; + var keyToken, keyName, value, node; + for (;;) { + keyToken = this._lookaheadToken(0); + if (identifierTypes.indexOf(keyToken.type) < 0) { + throw new Error("Expecting an identifier token, got: " + + keyToken.type); + } + keyName = keyToken.value; + this._advance(); + this._match(TOK_COLON); + value = this.expression(0); + node = {type: "KeyValuePair", name: keyName, value: value}; + pairs.push(node); + if (this._lookahead(0) === TOK_COMMA) { + this._match(TOK_COMMA); + } else if (this._lookahead(0) === TOK_RBRACE) { + this._match(TOK_RBRACE); + break; + } + } + return {type: "MultiSelectHash", children: pairs}; + } + }; + + + function TreeInterpreter(runtime) { + this.runtime = runtime; + } + + TreeInterpreter.prototype = { + search: function(node, value) { + return this.visit(node, value); + }, + + visit: function(node, value) { + var matched, current, result, first, second, field, left, right, collected, i; + switch (node.type) { + case "Field": + if (value !== null && isObject(value)) { + field = value[node.name]; + if (field === undefined) { + return null; + } else { + return field; + } + } + return null; + case "Subexpression": + result = this.visit(node.children[0], value); + for (i = 1; i < node.children.length; i++) { + result = this.visit(node.children[1], result); + if (result === null) { + return null; + } + } + return result; + case "IndexExpression": + left = this.visit(node.children[0], value); + right = this.visit(node.children[1], left); + return right; + case "Index": + if (!isArray(value)) { + return null; + } + var index = node.value; + if (index < 0) { + index = value.length + index; + } + result = value[index]; + if (result === undefined) { + result = null; + } + return result; + case "Slice": + if (!isArray(value)) { + return null; + } + var sliceParams = node.children.slice(0); + var computed = this.computeSliceParams(value.length, sliceParams); + var start = computed[0]; + var stop = computed[1]; + var step = computed[2]; + result = []; + if (step > 0) { + for (i = start; i < stop; i += step) { + result.push(value[i]); + } + } else { + for (i = start; i > stop; i += step) { + result.push(value[i]); + } + } + return result; + case "Projection": + // Evaluate left child. + var base = this.visit(node.children[0], value); + if (!isArray(base)) { + return null; + } + collected = []; + for (i = 0; i < base.length; i++) { + current = this.visit(node.children[1], base[i]); + if (current !== null) { + collected.push(current); + } + } + return collected; + case "ValueProjection": + // Evaluate left child. 
+ base = this.visit(node.children[0], value); + if (!isObject(base)) { + return null; + } + collected = []; + var values = objValues(base); + for (i = 0; i < values.length; i++) { + current = this.visit(node.children[1], values[i]); + if (current !== null) { + collected.push(current); + } + } + return collected; + case "FilterProjection": + base = this.visit(node.children[0], value); + if (!isArray(base)) { + return null; + } + var filtered = []; + var finalResults = []; + for (i = 0; i < base.length; i++) { + matched = this.visit(node.children[2], base[i]); + if (!isFalse(matched)) { + filtered.push(base[i]); + } + } + for (var j = 0; j < filtered.length; j++) { + current = this.visit(node.children[1], filtered[j]); + if (current !== null) { + finalResults.push(current); + } + } + return finalResults; + case "Comparator": + first = this.visit(node.children[0], value); + second = this.visit(node.children[1], value); + switch(node.name) { + case TOK_EQ: + result = strictDeepEqual(first, second); + break; + case TOK_NE: + result = !strictDeepEqual(first, second); + break; + case TOK_GT: + result = first > second; + break; + case TOK_GTE: + result = first >= second; + break; + case TOK_LT: + result = first < second; + break; + case TOK_LTE: + result = first <= second; + break; + default: + throw new Error("Unknown comparator: " + node.name); + } + return result; + case TOK_FLATTEN: + var original = this.visit(node.children[0], value); + if (!isArray(original)) { + return null; + } + var merged = []; + for (i = 0; i < original.length; i++) { + current = original[i]; + if (isArray(current)) { + merged.push.apply(merged, current); + } else { + merged.push(current); + } + } + return merged; + case "Identity": + return value; + case "MultiSelectList": + if (value === null) { + return null; + } + collected = []; + for (i = 0; i < node.children.length; i++) { + collected.push(this.visit(node.children[i], value)); + } + return collected; + case "MultiSelectHash": + if (value === null) { + return null; + } + collected = {}; + var child; + for (i = 0; i < node.children.length; i++) { + child = node.children[i]; + collected[child.name] = this.visit(child.value, value); + } + return collected; + case "OrExpression": + matched = this.visit(node.children[0], value); + if (isFalse(matched)) { + matched = this.visit(node.children[1], value); + } + return matched; + case "AndExpression": + first = this.visit(node.children[0], value); + + if (isFalse(first) === true) { + return first; + } + return this.visit(node.children[1], value); + case "NotExpression": + first = this.visit(node.children[0], value); + return isFalse(first); + case "Literal": + return node.value; + case TOK_PIPE: + left = this.visit(node.children[0], value); + return this.visit(node.children[1], left); + case TOK_CURRENT: + return value; + case "Function": + var resolvedArgs = []; + for (i = 0; i < node.children.length; i++) { + resolvedArgs.push(this.visit(node.children[i], value)); + } + return this.runtime.callFunction(node.name, resolvedArgs); + case "ExpressionReference": + var refNode = node.children[0]; + // Tag the node with a specific attribute so the type + // checker verify the type. 
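+          // For example, in sort_by(people, &age) the &age argument reaches
+          // callFunction as this tagged, unevaluated node ("people" and
+          // "age" are illustrative names).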
+          refNode.jmespathType = TOK_EXPREF;
+          return refNode;
+        default:
+          throw new Error("Unknown node type: " + node.type);
+      }
+    },
+
+    computeSliceParams: function(arrayLength, sliceParams) {
+        var start = sliceParams[0];
+        var stop = sliceParams[1];
+        var step = sliceParams[2];
+        var computed = [null, null, null];
+        if (step === null) {
+            step = 1;
+        } else if (step === 0) {
+            var error = new Error("Invalid slice, step cannot be 0");
+            error.name = "RuntimeError";
+            throw error;
+        }
+        var stepValueNegative = step < 0 ? true : false;
+
+        if (start === null) {
+            start = stepValueNegative ? arrayLength - 1 : 0;
+        } else {
+            start = this.capSliceRange(arrayLength, start, step);
+        }
+
+        if (stop === null) {
+            stop = stepValueNegative ? -1 : arrayLength;
+        } else {
+            stop = this.capSliceRange(arrayLength, stop, step);
+        }
+        computed[0] = start;
+        computed[1] = stop;
+        computed[2] = step;
+        return computed;
+    },
+
+    capSliceRange: function(arrayLength, actualValue, step) {
+        if (actualValue < 0) {
+            actualValue += arrayLength;
+            if (actualValue < 0) {
+                actualValue = step < 0 ? -1 : 0;
+            }
+        } else if (actualValue >= arrayLength) {
+            actualValue = step < 0 ? arrayLength - 1 : arrayLength;
+        }
+        return actualValue;
+    }
+
+  };
+
+  function Runtime(interpreter) {
+    this._interpreter = interpreter;
+    this.functionTable = {
+        // name: [function, <signature>]
+        // The <signature> can be:
+        //
+        // {
+        //   args: [[type1, type2], [type1, type2]],
+        //   variadic: true|false
+        // }
+        //
+        // Each arg in the arg list is a list of valid types
+        // (if the function is overloaded and supports multiple
+        // types. If the type is "any" then no type checking
+        // occurs on the argument. Variadic is optional
+        // and if not provided is assumed to be false.
+        abs: {_func: this._functionAbs, _signature: [{types: [TYPE_NUMBER]}]},
+        avg: {_func: this._functionAvg, _signature: [{types: [TYPE_ARRAY_NUMBER]}]},
+        ceil: {_func: this._functionCeil, _signature: [{types: [TYPE_NUMBER]}]},
+        contains: {
+            _func: this._functionContains,
+            _signature: [{types: [TYPE_STRING, TYPE_ARRAY]},
+                         {types: [TYPE_ANY]}]},
+        "ends_with": {
+            _func: this._functionEndsWith,
+            _signature: [{types: [TYPE_STRING]}, {types: [TYPE_STRING]}]},
+        floor: {_func: this._functionFloor, _signature: [{types: [TYPE_NUMBER]}]},
+        length: {
+            _func: this._functionLength,
+            _signature: [{types: [TYPE_STRING, TYPE_ARRAY, TYPE_OBJECT]}]},
+        map: {
+            _func: this._functionMap,
+            _signature: [{types: [TYPE_EXPREF]}, {types: [TYPE_ARRAY]}]},
+        max: {
+            _func: this._functionMax,
+            _signature: [{types: [TYPE_ARRAY_NUMBER, TYPE_ARRAY_STRING]}]},
+        "merge": {
+            _func: this._functionMerge,
+            _signature: [{types: [TYPE_OBJECT], variadic: true}]
+        },
+        "max_by": {
+            _func: this._functionMaxBy,
+            _signature: [{types: [TYPE_ARRAY]}, {types: [TYPE_EXPREF]}]
+        },
+        sum: {_func: this._functionSum, _signature: [{types: [TYPE_ARRAY_NUMBER]}]},
+        "starts_with": {
+            _func: this._functionStartsWith,
+            _signature: [{types: [TYPE_STRING]}, {types: [TYPE_STRING]}]},
+        min: {
+            _func: this._functionMin,
+            _signature: [{types: [TYPE_ARRAY_NUMBER, TYPE_ARRAY_STRING]}]},
+        "min_by": {
+            _func: this._functionMinBy,
+            _signature: [{types: [TYPE_ARRAY]}, {types: [TYPE_EXPREF]}]
+        },
+        type: {_func: this._functionType, _signature: [{types: [TYPE_ANY]}]},
+        keys: {_func: this._functionKeys, _signature: [{types: [TYPE_OBJECT]}]},
+        values: {_func: this._functionValues, _signature: [{types: [TYPE_OBJECT]}]},
+        sort: {_func: this._functionSort, _signature: [{types: [TYPE_ARRAY_STRING, TYPE_ARRAY_NUMBER]}]},
"sort_by": { + _func: this._functionSortBy, + _signature: [{types: [TYPE_ARRAY]}, {types: [TYPE_EXPREF]}] + }, + join: { + _func: this._functionJoin, + _signature: [ + {types: [TYPE_STRING]}, + {types: [TYPE_ARRAY_STRING]} + ] + }, + reverse: { + _func: this._functionReverse, + _signature: [{types: [TYPE_STRING, TYPE_ARRAY]}]}, + "to_array": {_func: this._functionToArray, _signature: [{types: [TYPE_ANY]}]}, + "to_string": {_func: this._functionToString, _signature: [{types: [TYPE_ANY]}]}, + "to_number": {_func: this._functionToNumber, _signature: [{types: [TYPE_ANY]}]}, + "not_null": { + _func: this._functionNotNull, + _signature: [{types: [TYPE_ANY], variadic: true}] + } + }; + } + + Runtime.prototype = { + callFunction: function(name, resolvedArgs) { + var functionEntry = this.functionTable[name]; + if (functionEntry === undefined) { + throw new Error("Unknown function: " + name + "()"); + } + this._validateArgs(name, resolvedArgs, functionEntry._signature); + return functionEntry._func.call(this, resolvedArgs); + }, + + _validateArgs: function(name, args, signature) { + // Validating the args requires validating + // the correct arity and the correct type of each arg. + // If the last argument is declared as variadic, then we need + // a minimum number of args to be required. Otherwise it has to + // be an exact amount. + var pluralized; + if (signature[signature.length - 1].variadic) { + if (args.length < signature.length) { + pluralized = signature.length === 1 ? " argument" : " arguments"; + throw new Error("ArgumentError: " + name + "() " + + "takes at least" + signature.length + pluralized + + " but received " + args.length); + } + } else if (args.length !== signature.length) { + pluralized = signature.length === 1 ? " argument" : " arguments"; + throw new Error("ArgumentError: " + name + "() " + + "takes " + signature.length + pluralized + + " but received " + args.length); + } + var currentSpec; + var actualType; + var typeMatched; + for (var i = 0; i < signature.length; i++) { + typeMatched = false; + currentSpec = signature[i].types; + actualType = this._getTypeName(args[i]); + for (var j = 0; j < currentSpec.length; j++) { + if (this._typeMatches(actualType, currentSpec[j], args[i])) { + typeMatched = true; + break; + } + } + if (!typeMatched) { + var expected = currentSpec + .map(function(typeIdentifier) { + return TYPE_NAME_TABLE[typeIdentifier]; + }) + .join(','); + throw new Error("TypeError: " + name + "() " + + "expected argument " + (i + 1) + + " to be type " + expected + + " but received type " + + TYPE_NAME_TABLE[actualType] + " instead."); + } + } + }, + + _typeMatches: function(actual, expected, argValue) { + if (expected === TYPE_ANY) { + return true; + } + if (expected === TYPE_ARRAY_STRING || + expected === TYPE_ARRAY_NUMBER || + expected === TYPE_ARRAY) { + // The expected type can either just be array, + // or it can require a specific subtype (array of numbers). + // + // The simplest case is if "array" with no subtype is specified. + if (expected === TYPE_ARRAY) { + return actual === TYPE_ARRAY; + } else if (actual === TYPE_ARRAY) { + // Otherwise we need to check subtypes. + // I think this has potential to be improved. 
+ var subtype; + if (expected === TYPE_ARRAY_NUMBER) { + subtype = TYPE_NUMBER; + } else if (expected === TYPE_ARRAY_STRING) { + subtype = TYPE_STRING; + } + for (var i = 0; i < argValue.length; i++) { + if (!this._typeMatches( + this._getTypeName(argValue[i]), subtype, + argValue[i])) { + return false; + } + } + return true; + } + } else { + return actual === expected; + } + }, + _getTypeName: function(obj) { + switch (Object.prototype.toString.call(obj)) { + case "[object String]": + return TYPE_STRING; + case "[object Number]": + return TYPE_NUMBER; + case "[object Array]": + return TYPE_ARRAY; + case "[object Boolean]": + return TYPE_BOOLEAN; + case "[object Null]": + return TYPE_NULL; + case "[object Object]": + // Check if it's an expref. If it has, it's been + // tagged with a jmespathType attr of 'Expref'; + if (obj.jmespathType === TOK_EXPREF) { + return TYPE_EXPREF; + } else { + return TYPE_OBJECT; + } + } + }, + + _functionStartsWith: function(resolvedArgs) { + return resolvedArgs[0].lastIndexOf(resolvedArgs[1]) === 0; + }, + + _functionEndsWith: function(resolvedArgs) { + var searchStr = resolvedArgs[0]; + var suffix = resolvedArgs[1]; + return searchStr.indexOf(suffix, searchStr.length - suffix.length) !== -1; + }, + + _functionReverse: function(resolvedArgs) { + var typeName = this._getTypeName(resolvedArgs[0]); + if (typeName === TYPE_STRING) { + var originalStr = resolvedArgs[0]; + var reversedStr = ""; + for (var i = originalStr.length - 1; i >= 0; i--) { + reversedStr += originalStr[i]; + } + return reversedStr; + } else { + var reversedArray = resolvedArgs[0].slice(0); + reversedArray.reverse(); + return reversedArray; + } + }, + + _functionAbs: function(resolvedArgs) { + return Math.abs(resolvedArgs[0]); + }, + + _functionCeil: function(resolvedArgs) { + return Math.ceil(resolvedArgs[0]); + }, + + _functionAvg: function(resolvedArgs) { + var sum = 0; + var inputArray = resolvedArgs[0]; + for (var i = 0; i < inputArray.length; i++) { + sum += inputArray[i]; + } + return sum / inputArray.length; + }, + + _functionContains: function(resolvedArgs) { + return resolvedArgs[0].indexOf(resolvedArgs[1]) >= 0; + }, + + _functionFloor: function(resolvedArgs) { + return Math.floor(resolvedArgs[0]); + }, + + _functionLength: function(resolvedArgs) { + if (!isObject(resolvedArgs[0])) { + return resolvedArgs[0].length; + } else { + // As far as I can tell, there's no way to get the length + // of an object without O(n) iteration through the object. 
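+        // e.g. an object argument of {"a": 1, "b": 2} yields a length of 2.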
+ return Object.keys(resolvedArgs[0]).length; + } + }, + + _functionMap: function(resolvedArgs) { + var mapped = []; + var interpreter = this._interpreter; + var exprefNode = resolvedArgs[0]; + var elements = resolvedArgs[1]; + for (var i = 0; i < elements.length; i++) { + mapped.push(interpreter.visit(exprefNode, elements[i])); + } + return mapped; + }, + + _functionMerge: function(resolvedArgs) { + var merged = {}; + for (var i = 0; i < resolvedArgs.length; i++) { + var current = resolvedArgs[i]; + for (var key in current) { + merged[key] = current[key]; + } + } + return merged; + }, + + _functionMax: function(resolvedArgs) { + if (resolvedArgs[0].length > 0) { + var typeName = this._getTypeName(resolvedArgs[0][0]); + if (typeName === TYPE_NUMBER) { + return Math.max.apply(Math, resolvedArgs[0]); + } else { + var elements = resolvedArgs[0]; + var maxElement = elements[0]; + for (var i = 1; i < elements.length; i++) { + if (maxElement.localeCompare(elements[i]) < 0) { + maxElement = elements[i]; + } + } + return maxElement; + } + } else { + return null; + } + }, + + _functionMin: function(resolvedArgs) { + if (resolvedArgs[0].length > 0) { + var typeName = this._getTypeName(resolvedArgs[0][0]); + if (typeName === TYPE_NUMBER) { + return Math.min.apply(Math, resolvedArgs[0]); + } else { + var elements = resolvedArgs[0]; + var minElement = elements[0]; + for (var i = 1; i < elements.length; i++) { + if (elements[i].localeCompare(minElement) < 0) { + minElement = elements[i]; + } + } + return minElement; + } + } else { + return null; + } + }, + + _functionSum: function(resolvedArgs) { + var sum = 0; + var listToSum = resolvedArgs[0]; + for (var i = 0; i < listToSum.length; i++) { + sum += listToSum[i]; + } + return sum; + }, + + _functionType: function(resolvedArgs) { + switch (this._getTypeName(resolvedArgs[0])) { + case TYPE_NUMBER: + return "number"; + case TYPE_STRING: + return "string"; + case TYPE_ARRAY: + return "array"; + case TYPE_OBJECT: + return "object"; + case TYPE_BOOLEAN: + return "boolean"; + case TYPE_EXPREF: + return "expref"; + case TYPE_NULL: + return "null"; + } + }, + + _functionKeys: function(resolvedArgs) { + return Object.keys(resolvedArgs[0]); + }, + + _functionValues: function(resolvedArgs) { + var obj = resolvedArgs[0]; + var keys = Object.keys(obj); + var values = []; + for (var i = 0; i < keys.length; i++) { + values.push(obj[keys[i]]); + } + return values; + }, + + _functionJoin: function(resolvedArgs) { + var joinChar = resolvedArgs[0]; + var listJoin = resolvedArgs[1]; + return listJoin.join(joinChar); + }, + + _functionToArray: function(resolvedArgs) { + if (this._getTypeName(resolvedArgs[0]) === TYPE_ARRAY) { + return resolvedArgs[0]; + } else { + return [resolvedArgs[0]]; + } + }, + + _functionToString: function(resolvedArgs) { + if (this._getTypeName(resolvedArgs[0]) === TYPE_STRING) { + return resolvedArgs[0]; + } else { + return JSON.stringify(resolvedArgs[0]); + } + }, + + _functionToNumber: function(resolvedArgs) { + var typeName = this._getTypeName(resolvedArgs[0]); + var convertedValue; + if (typeName === TYPE_NUMBER) { + return resolvedArgs[0]; + } else if (typeName === TYPE_STRING) { + convertedValue = +resolvedArgs[0]; + if (!isNaN(convertedValue)) { + return convertedValue; + } + } + return null; + }, + + _functionNotNull: function(resolvedArgs) { + for (var i = 0; i < resolvedArgs.length; i++) { + if (this._getTypeName(resolvedArgs[i]) !== TYPE_NULL) { + return resolvedArgs[i]; + } + } + return null; + }, + + _functionSort: 
function(resolvedArgs) { + var sortedArray = resolvedArgs[0].slice(0); + sortedArray.sort(); + return sortedArray; + }, + + _functionSortBy: function(resolvedArgs) { + var sortedArray = resolvedArgs[0].slice(0); + if (sortedArray.length === 0) { + return sortedArray; + } + var interpreter = this._interpreter; + var exprefNode = resolvedArgs[1]; + var requiredType = this._getTypeName( + interpreter.visit(exprefNode, sortedArray[0])); + if ([TYPE_NUMBER, TYPE_STRING].indexOf(requiredType) < 0) { + throw new Error("TypeError"); + } + var that = this; + // In order to get a stable sort out of an unstable + // sort algorithm, we decorate/sort/undecorate (DSU) + // by creating a new list of [index, element] pairs. + // In the cmp function, if the evaluated elements are + // equal, then the index will be used as the tiebreaker. + // After the decorated list has been sorted, it will be + // undecorated to extract the original elements. + var decorated = []; + for (var i = 0; i < sortedArray.length; i++) { + decorated.push([i, sortedArray[i]]); + } + decorated.sort(function(a, b) { + var exprA = interpreter.visit(exprefNode, a[1]); + var exprB = interpreter.visit(exprefNode, b[1]); + if (that._getTypeName(exprA) !== requiredType) { + throw new Error( + "TypeError: expected " + requiredType + ", received " + + that._getTypeName(exprA)); + } else if (that._getTypeName(exprB) !== requiredType) { + throw new Error( + "TypeError: expected " + requiredType + ", received " + + that._getTypeName(exprB)); + } + if (exprA > exprB) { + return 1; + } else if (exprA < exprB) { + return -1; + } else { + // If they're equal compare the items by their + // order to maintain relative order of equal keys + // (i.e. to get a stable sort). + return a[0] - b[0]; + } + }); + // Undecorate: extract out the original list elements. 
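+      // For example, sorting [{n: 2}, {n: 1}, {n: 2}] by &n decorates to
+      // [[0, {n: 2}], [1, {n: 1}], [2, {n: 2}]]; the tied n:2 records keep
+      // their original relative order via the index tiebreaker, and the
+      // loop below strips the indexes back off (illustrative values).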
+ for (var j = 0; j < decorated.length; j++) { + sortedArray[j] = decorated[j][1]; + } + return sortedArray; + }, + + _functionMaxBy: function(resolvedArgs) { + var exprefNode = resolvedArgs[1]; + var resolvedArray = resolvedArgs[0]; + var keyFunction = this.createKeyFunction(exprefNode, [TYPE_NUMBER, TYPE_STRING]); + var maxNumber = -Infinity; + var maxRecord; + var current; + for (var i = 0; i < resolvedArray.length; i++) { + current = keyFunction(resolvedArray[i]); + if (current > maxNumber) { + maxNumber = current; + maxRecord = resolvedArray[i]; + } + } + return maxRecord; + }, + + _functionMinBy: function(resolvedArgs) { + var exprefNode = resolvedArgs[1]; + var resolvedArray = resolvedArgs[0]; + var keyFunction = this.createKeyFunction(exprefNode, [TYPE_NUMBER, TYPE_STRING]); + var minNumber = Infinity; + var minRecord; + var current; + for (var i = 0; i < resolvedArray.length; i++) { + current = keyFunction(resolvedArray[i]); + if (current < minNumber) { + minNumber = current; + minRecord = resolvedArray[i]; + } + } + return minRecord; + }, + + createKeyFunction: function(exprefNode, allowedTypes) { + var that = this; + var interpreter = this._interpreter; + var keyFunc = function(x) { + var current = interpreter.visit(exprefNode, x); + if (allowedTypes.indexOf(that._getTypeName(current)) < 0) { + var msg = "TypeError: expected one of " + allowedTypes + + ", received " + that._getTypeName(current); + throw new Error(msg); + } + return current; + }; + return keyFunc; + } + + }; + + function compile(stream) { + var parser = new Parser(); + var ast = parser.parse(stream); + return ast; + } + + function tokenize(stream) { + var lexer = new Lexer(); + return lexer.tokenize(stream); + } + + function search(data, expression) { + var parser = new Parser(); + // This needs to be improved. Both the interpreter and runtime depend on + // each other. The runtime needs the interpreter to support exprefs. + // There's likely a clean way to avoid the cyclic dependency. + var runtime = new Runtime(); + var interpreter = new TreeInterpreter(runtime); + runtime._interpreter = interpreter; + var node = parser.parse(expression); + return interpreter.search(node, data); + } + + exports.tokenize = tokenize; + exports.compile = compile; + exports.search = search; + exports.strictDeepEqual = strictDeepEqual; +})( false ? 0 : exports); + + +/***/ }), + +/***/ 90250: +/***/ (function(module, exports, __nccwpck_require__) { + +/* module decorator */ module = __nccwpck_require__.nmd(module); +/** + * @license + * Lodash + * Copyright OpenJS Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ +;(function() { + + /** Used as a safe reference for `undefined` in pre-ES5 environments. */ + var undefined; + + /** Used as the semantic version number. */ + var VERSION = '4.17.21'; + + /** Used as the size to enable large array optimizations. */ + var LARGE_ARRAY_SIZE = 200; + + /** Error message constants. */ + var CORE_ERROR_TEXT = 'Unsupported core-js use. Try https://npms.io/search?q=ponyfill.', + FUNC_ERROR_TEXT = 'Expected a function', + INVALID_TEMPL_VAR_ERROR_TEXT = 'Invalid `variable` option passed into `_.template`'; + + /** Used to stand-in for `undefined` hash values. */ + var HASH_UNDEFINED = '__lodash_hash_undefined__'; + + /** Used as the maximum memoize cache size. */ + var MAX_MEMOIZE_SIZE = 500; + + /** Used as the internal argument placeholder. 
*/ + var PLACEHOLDER = '__lodash_placeholder__'; + + /** Used to compose bitmasks for cloning. */ + var CLONE_DEEP_FLAG = 1, + CLONE_FLAT_FLAG = 2, + CLONE_SYMBOLS_FLAG = 4; + + /** Used to compose bitmasks for value comparisons. */ + var COMPARE_PARTIAL_FLAG = 1, + COMPARE_UNORDERED_FLAG = 2; + + /** Used to compose bitmasks for function metadata. */ + var WRAP_BIND_FLAG = 1, + WRAP_BIND_KEY_FLAG = 2, + WRAP_CURRY_BOUND_FLAG = 4, + WRAP_CURRY_FLAG = 8, + WRAP_CURRY_RIGHT_FLAG = 16, + WRAP_PARTIAL_FLAG = 32, + WRAP_PARTIAL_RIGHT_FLAG = 64, + WRAP_ARY_FLAG = 128, + WRAP_REARG_FLAG = 256, + WRAP_FLIP_FLAG = 512; + + /** Used as default options for `_.truncate`. */ + var DEFAULT_TRUNC_LENGTH = 30, + DEFAULT_TRUNC_OMISSION = '...'; + + /** Used to detect hot functions by number of calls within a span of milliseconds. */ + var HOT_COUNT = 800, + HOT_SPAN = 16; + + /** Used to indicate the type of lazy iteratees. */ + var LAZY_FILTER_FLAG = 1, + LAZY_MAP_FLAG = 2, + LAZY_WHILE_FLAG = 3; + + /** Used as references for various `Number` constants. */ + var INFINITY = 1 / 0, + MAX_SAFE_INTEGER = 9007199254740991, + MAX_INTEGER = 1.7976931348623157e+308, + NAN = 0 / 0; + + /** Used as references for the maximum length and index of an array. */ + var MAX_ARRAY_LENGTH = 4294967295, + MAX_ARRAY_INDEX = MAX_ARRAY_LENGTH - 1, + HALF_MAX_ARRAY_LENGTH = MAX_ARRAY_LENGTH >>> 1; + + /** Used to associate wrap methods with their bit flags. */ + var wrapFlags = [ + ['ary', WRAP_ARY_FLAG], + ['bind', WRAP_BIND_FLAG], + ['bindKey', WRAP_BIND_KEY_FLAG], + ['curry', WRAP_CURRY_FLAG], + ['curryRight', WRAP_CURRY_RIGHT_FLAG], + ['flip', WRAP_FLIP_FLAG], + ['partial', WRAP_PARTIAL_FLAG], + ['partialRight', WRAP_PARTIAL_RIGHT_FLAG], + ['rearg', WRAP_REARG_FLAG] + ]; + + /** `Object#toString` result references. */ + var argsTag = '[object Arguments]', + arrayTag = '[object Array]', + asyncTag = '[object AsyncFunction]', + boolTag = '[object Boolean]', + dateTag = '[object Date]', + domExcTag = '[object DOMException]', + errorTag = '[object Error]', + funcTag = '[object Function]', + genTag = '[object GeneratorFunction]', + mapTag = '[object Map]', + numberTag = '[object Number]', + nullTag = '[object Null]', + objectTag = '[object Object]', + promiseTag = '[object Promise]', + proxyTag = '[object Proxy]', + regexpTag = '[object RegExp]', + setTag = '[object Set]', + stringTag = '[object String]', + symbolTag = '[object Symbol]', + undefinedTag = '[object Undefined]', + weakMapTag = '[object WeakMap]', + weakSetTag = '[object WeakSet]'; + + var arrayBufferTag = '[object ArrayBuffer]', + dataViewTag = '[object DataView]', + float32Tag = '[object Float32Array]', + float64Tag = '[object Float64Array]', + int8Tag = '[object Int8Array]', + int16Tag = '[object Int16Array]', + int32Tag = '[object Int32Array]', + uint8Tag = '[object Uint8Array]', + uint8ClampedTag = '[object Uint8ClampedArray]', + uint16Tag = '[object Uint16Array]', + uint32Tag = '[object Uint32Array]'; + + /** Used to match empty string literals in compiled template source. */ + var reEmptyStringLeading = /\b__p \+= '';/g, + reEmptyStringMiddle = /\b(__p \+=) '' \+/g, + reEmptyStringTrailing = /(__e\(.*?\)|\b__t\)) \+\n'';/g; + + /** Used to match HTML entities and HTML characters. */ + var reEscapedHtml = /&(?:amp|lt|gt|quot|#39);/g, + reUnescapedHtml = /[&<>"']/g, + reHasEscapedHtml = RegExp(reEscapedHtml.source), + reHasUnescapedHtml = RegExp(reUnescapedHtml.source); + + /** Used to match template delimiters. 
*/ + var reEscape = /<%-([\s\S]+?)%>/g, + reEvaluate = /<%([\s\S]+?)%>/g, + reInterpolate = /<%=([\s\S]+?)%>/g; + + /** Used to match property names within property paths. */ + var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, + reIsPlainProp = /^\w*$/, + rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g; + + /** + * Used to match `RegExp` + * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). + */ + var reRegExpChar = /[\\^$.*+?()[\]{}|]/g, + reHasRegExpChar = RegExp(reRegExpChar.source); + + /** Used to match leading whitespace. */ + var reTrimStart = /^\s+/; + + /** Used to match a single whitespace character. */ + var reWhitespace = /\s/; + + /** Used to match wrap detail comments. */ + var reWrapComment = /\{(?:\n\/\* \[wrapped with .+\] \*\/)?\n?/, + reWrapDetails = /\{\n\/\* \[wrapped with (.+)\] \*/, + reSplitDetails = /,? & /; + + /** Used to match words composed of alphanumeric characters. */ + var reAsciiWord = /[^\x00-\x2f\x3a-\x40\x5b-\x60\x7b-\x7f]+/g; + + /** + * Used to validate the `validate` option in `_.template` variable. + * + * Forbids characters which could potentially change the meaning of the function argument definition: + * - "()," (modification of function parameters) + * - "=" (default value) + * - "[]{}" (destructuring of function parameters) + * - "/" (beginning of a comment) + * - whitespace + */ + var reForbiddenIdentifierChars = /[()=,{}\[\]\/\s]/; + + /** Used to match backslashes in property paths. */ + var reEscapeChar = /\\(\\)?/g; + + /** + * Used to match + * [ES template delimiters](http://ecma-international.org/ecma-262/7.0/#sec-template-literal-lexical-components). + */ + var reEsTemplate = /\$\{([^\\}]*(?:\\.[^\\}]*)*)\}/g; + + /** Used to match `RegExp` flags from their coerced string values. */ + var reFlags = /\w*$/; + + /** Used to detect bad signed hexadecimal string values. */ + var reIsBadHex = /^[-+]0x[0-9a-f]+$/i; + + /** Used to detect binary string values. */ + var reIsBinary = /^0b[01]+$/i; + + /** Used to detect host constructors (Safari). */ + var reIsHostCtor = /^\[object .+?Constructor\]$/; + + /** Used to detect octal string values. */ + var reIsOctal = /^0o[0-7]+$/i; + + /** Used to detect unsigned integer values. */ + var reIsUint = /^(?:0|[1-9]\d*)$/; + + /** Used to match Latin Unicode letters (excluding mathematical operators). */ + var reLatin = /[\xc0-\xd6\xd8-\xf6\xf8-\xff\u0100-\u017f]/g; + + /** Used to ensure capturing order of template delimiters. */ + var reNoMatch = /($^)/; + + /** Used to match unescaped characters in compiled string literals. */ + var reUnescapedString = /['\n\r\u2028\u2029\\]/g; + + /** Used to compose unicode character classes. 
*/ + var rsAstralRange = '\\ud800-\\udfff', + rsComboMarksRange = '\\u0300-\\u036f', + reComboHalfMarksRange = '\\ufe20-\\ufe2f', + rsComboSymbolsRange = '\\u20d0-\\u20ff', + rsComboRange = rsComboMarksRange + reComboHalfMarksRange + rsComboSymbolsRange, + rsDingbatRange = '\\u2700-\\u27bf', + rsLowerRange = 'a-z\\xdf-\\xf6\\xf8-\\xff', + rsMathOpRange = '\\xac\\xb1\\xd7\\xf7', + rsNonCharRange = '\\x00-\\x2f\\x3a-\\x40\\x5b-\\x60\\x7b-\\xbf', + rsPunctuationRange = '\\u2000-\\u206f', + rsSpaceRange = ' \\t\\x0b\\f\\xa0\\ufeff\\n\\r\\u2028\\u2029\\u1680\\u180e\\u2000\\u2001\\u2002\\u2003\\u2004\\u2005\\u2006\\u2007\\u2008\\u2009\\u200a\\u202f\\u205f\\u3000', + rsUpperRange = 'A-Z\\xc0-\\xd6\\xd8-\\xde', + rsVarRange = '\\ufe0e\\ufe0f', + rsBreakRange = rsMathOpRange + rsNonCharRange + rsPunctuationRange + rsSpaceRange; + + /** Used to compose unicode capture groups. */ + var rsApos = "['\u2019]", + rsAstral = '[' + rsAstralRange + ']', + rsBreak = '[' + rsBreakRange + ']', + rsCombo = '[' + rsComboRange + ']', + rsDigits = '\\d+', + rsDingbat = '[' + rsDingbatRange + ']', + rsLower = '[' + rsLowerRange + ']', + rsMisc = '[^' + rsAstralRange + rsBreakRange + rsDigits + rsDingbatRange + rsLowerRange + rsUpperRange + ']', + rsFitz = '\\ud83c[\\udffb-\\udfff]', + rsModifier = '(?:' + rsCombo + '|' + rsFitz + ')', + rsNonAstral = '[^' + rsAstralRange + ']', + rsRegional = '(?:\\ud83c[\\udde6-\\uddff]){2}', + rsSurrPair = '[\\ud800-\\udbff][\\udc00-\\udfff]', + rsUpper = '[' + rsUpperRange + ']', + rsZWJ = '\\u200d'; + + /** Used to compose unicode regexes. */ + var rsMiscLower = '(?:' + rsLower + '|' + rsMisc + ')', + rsMiscUpper = '(?:' + rsUpper + '|' + rsMisc + ')', + rsOptContrLower = '(?:' + rsApos + '(?:d|ll|m|re|s|t|ve))?', + rsOptContrUpper = '(?:' + rsApos + '(?:D|LL|M|RE|S|T|VE))?', + reOptMod = rsModifier + '?', + rsOptVar = '[' + rsVarRange + ']?', + rsOptJoin = '(?:' + rsZWJ + '(?:' + [rsNonAstral, rsRegional, rsSurrPair].join('|') + ')' + rsOptVar + reOptMod + ')*', + rsOrdLower = '\\d*(?:1st|2nd|3rd|(?![123])\\dth)(?=\\b|[A-Z_])', + rsOrdUpper = '\\d*(?:1ST|2ND|3RD|(?![123])\\dTH)(?=\\b|[a-z_])', + rsSeq = rsOptVar + reOptMod + rsOptJoin, + rsEmoji = '(?:' + [rsDingbat, rsRegional, rsSurrPair].join('|') + ')' + rsSeq, + rsSymbol = '(?:' + [rsNonAstral + rsCombo + '?', rsCombo, rsRegional, rsSurrPair, rsAstral].join('|') + ')'; + + /** Used to match apostrophes. */ + var reApos = RegExp(rsApos, 'g'); + + /** + * Used to match [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks) and + * [combining diacritical marks for symbols](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks_for_Symbols). + */ + var reComboMark = RegExp(rsCombo, 'g'); + + /** Used to match [string symbols](https://mathiasbynens.be/notes/javascript-unicode). */ + var reUnicode = RegExp(rsFitz + '(?=' + rsFitz + ')|' + rsSymbol + rsSeq, 'g'); + + /** Used to match complex or compound words. */ + var reUnicodeWord = RegExp([ + rsUpper + '?' + rsLower + '+' + rsOptContrLower + '(?=' + [rsBreak, rsUpper, '$'].join('|') + ')', + rsMiscUpper + '+' + rsOptContrUpper + '(?=' + [rsBreak, rsUpper + rsMiscLower, '$'].join('|') + ')', + rsUpper + '?' + rsMiscLower + '+' + rsOptContrLower, + rsUpper + '+' + rsOptContrUpper, + rsOrdUpper, + rsOrdLower, + rsDigits, + rsEmoji + ].join('|'), 'g'); + + /** Used to detect strings with [zero-width joiners or code points from the astral planes](http://eev.ee/blog/2015/09/12/dark-corners-of-unicode/). 
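+   *
+   * Illustrative:
+   *
+   *   reHasUnicode.test('abc');          // => false
+   *   reHasUnicode.test('\ud83d\ude00'); // => true (surrogate pair)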
*/ + var reHasUnicode = RegExp('[' + rsZWJ + rsAstralRange + rsComboRange + rsVarRange + ']'); + + /** Used to detect strings that need a more robust regexp to match words. */ + var reHasUnicodeWord = /[a-z][A-Z]|[A-Z]{2}[a-z]|[0-9][a-zA-Z]|[a-zA-Z][0-9]|[^a-zA-Z0-9 ]/; + + /** Used to assign default `context` object properties. */ + var contextProps = [ + 'Array', 'Buffer', 'DataView', 'Date', 'Error', 'Float32Array', 'Float64Array', + 'Function', 'Int8Array', 'Int16Array', 'Int32Array', 'Map', 'Math', 'Object', + 'Promise', 'RegExp', 'Set', 'String', 'Symbol', 'TypeError', 'Uint8Array', + 'Uint8ClampedArray', 'Uint16Array', 'Uint32Array', 'WeakMap', + '_', 'clearTimeout', 'isFinite', 'parseInt', 'setTimeout' + ]; + + /** Used to make template sourceURLs easier to identify. */ + var templateCounter = -1; + + /** Used to identify `toStringTag` values of typed arrays. */ + var typedArrayTags = {}; + typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = + typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = + typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = + typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = + typedArrayTags[uint32Tag] = true; + typedArrayTags[argsTag] = typedArrayTags[arrayTag] = + typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = + typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = + typedArrayTags[errorTag] = typedArrayTags[funcTag] = + typedArrayTags[mapTag] = typedArrayTags[numberTag] = + typedArrayTags[objectTag] = typedArrayTags[regexpTag] = + typedArrayTags[setTag] = typedArrayTags[stringTag] = + typedArrayTags[weakMapTag] = false; + + /** Used to identify `toStringTag` values supported by `_.clone`. */ + var cloneableTags = {}; + cloneableTags[argsTag] = cloneableTags[arrayTag] = + cloneableTags[arrayBufferTag] = cloneableTags[dataViewTag] = + cloneableTags[boolTag] = cloneableTags[dateTag] = + cloneableTags[float32Tag] = cloneableTags[float64Tag] = + cloneableTags[int8Tag] = cloneableTags[int16Tag] = + cloneableTags[int32Tag] = cloneableTags[mapTag] = + cloneableTags[numberTag] = cloneableTags[objectTag] = + cloneableTags[regexpTag] = cloneableTags[setTag] = + cloneableTags[stringTag] = cloneableTags[symbolTag] = + cloneableTags[uint8Tag] = cloneableTags[uint8ClampedTag] = + cloneableTags[uint16Tag] = cloneableTags[uint32Tag] = true; + cloneableTags[errorTag] = cloneableTags[funcTag] = + cloneableTags[weakMapTag] = false; + + /** Used to map Latin Unicode letters to basic Latin letters. */ + var deburredLetters = { + // Latin-1 Supplement block. + '\xc0': 'A', '\xc1': 'A', '\xc2': 'A', '\xc3': 'A', '\xc4': 'A', '\xc5': 'A', + '\xe0': 'a', '\xe1': 'a', '\xe2': 'a', '\xe3': 'a', '\xe4': 'a', '\xe5': 'a', + '\xc7': 'C', '\xe7': 'c', + '\xd0': 'D', '\xf0': 'd', + '\xc8': 'E', '\xc9': 'E', '\xca': 'E', '\xcb': 'E', + '\xe8': 'e', '\xe9': 'e', '\xea': 'e', '\xeb': 'e', + '\xcc': 'I', '\xcd': 'I', '\xce': 'I', '\xcf': 'I', + '\xec': 'i', '\xed': 'i', '\xee': 'i', '\xef': 'i', + '\xd1': 'N', '\xf1': 'n', + '\xd2': 'O', '\xd3': 'O', '\xd4': 'O', '\xd5': 'O', '\xd6': 'O', '\xd8': 'O', + '\xf2': 'o', '\xf3': 'o', '\xf4': 'o', '\xf5': 'o', '\xf6': 'o', '\xf8': 'o', + '\xd9': 'U', '\xda': 'U', '\xdb': 'U', '\xdc': 'U', + '\xf9': 'u', '\xfa': 'u', '\xfb': 'u', '\xfc': 'u', + '\xdd': 'Y', '\xfd': 'y', '\xff': 'y', + '\xc6': 'Ae', '\xe6': 'ae', + '\xde': 'Th', '\xfe': 'th', + '\xdf': 'ss', + // Latin Extended-A block. 
+    '\u0100': 'A', '\u0102': 'A', '\u0104': 'A',
+    '\u0101': 'a', '\u0103': 'a', '\u0105': 'a',
+    '\u0106': 'C', '\u0108': 'C', '\u010a': 'C', '\u010c': 'C',
+    '\u0107': 'c', '\u0109': 'c', '\u010b': 'c', '\u010d': 'c',
+    '\u010e': 'D', '\u0110': 'D', '\u010f': 'd', '\u0111': 'd',
+    '\u0112': 'E', '\u0114': 'E', '\u0116': 'E', '\u0118': 'E', '\u011a': 'E',
+    '\u0113': 'e', '\u0115': 'e', '\u0117': 'e', '\u0119': 'e', '\u011b': 'e',
+    '\u011c': 'G', '\u011e': 'G', '\u0120': 'G', '\u0122': 'G',
+    '\u011d': 'g', '\u011f': 'g', '\u0121': 'g', '\u0123': 'g',
+    '\u0124': 'H', '\u0126': 'H', '\u0125': 'h', '\u0127': 'h',
+    '\u0128': 'I', '\u012a': 'I', '\u012c': 'I', '\u012e': 'I', '\u0130': 'I',
+    '\u0129': 'i', '\u012b': 'i', '\u012d': 'i', '\u012f': 'i', '\u0131': 'i',
+    '\u0134': 'J', '\u0135': 'j',
+    '\u0136': 'K', '\u0137': 'k', '\u0138': 'k',
+    '\u0139': 'L', '\u013b': 'L', '\u013d': 'L', '\u013f': 'L', '\u0141': 'L',
+    '\u013a': 'l', '\u013c': 'l', '\u013e': 'l', '\u0140': 'l', '\u0142': 'l',
+    '\u0143': 'N', '\u0145': 'N', '\u0147': 'N', '\u014a': 'N',
+    '\u0144': 'n', '\u0146': 'n', '\u0148': 'n', '\u014b': 'n',
+    '\u014c': 'O', '\u014e': 'O', '\u0150': 'O',
+    '\u014d': 'o', '\u014f': 'o', '\u0151': 'o',
+    '\u0154': 'R', '\u0156': 'R', '\u0158': 'R',
+    '\u0155': 'r', '\u0157': 'r', '\u0159': 'r',
+    '\u015a': 'S', '\u015c': 'S', '\u015e': 'S', '\u0160': 'S',
+    '\u015b': 's', '\u015d': 's', '\u015f': 's', '\u0161': 's',
+    '\u0162': 'T', '\u0164': 'T', '\u0166': 'T',
+    '\u0163': 't', '\u0165': 't', '\u0167': 't',
+    '\u0168': 'U', '\u016a': 'U', '\u016c': 'U', '\u016e': 'U', '\u0170': 'U', '\u0172': 'U',
+    '\u0169': 'u', '\u016b': 'u', '\u016d': 'u', '\u016f': 'u', '\u0171': 'u', '\u0173': 'u',
+    '\u0174': 'W', '\u0175': 'w',
+    '\u0176': 'Y', '\u0177': 'y', '\u0178': 'Y',
+    '\u0179': 'Z', '\u017b': 'Z', '\u017d': 'Z',
+    '\u017a': 'z', '\u017c': 'z', '\u017e': 'z',
+    '\u0132': 'IJ', '\u0133': 'ij',
+    '\u0152': 'Oe', '\u0153': 'oe',
+    '\u0149': "'n", '\u017f': 's'
+  };
+
+  /** Used to map characters to HTML entities. */
+  var htmlEscapes = {
+    '&': '&amp;',
+    '<': '&lt;',
+    '>': '&gt;',
+    '"': '&quot;',
+    "'": '&#39;'
+  };
+
+  /** Used to map HTML entities to characters. */
+  var htmlUnescapes = {
+    '&amp;': '&',
+    '&lt;': '<',
+    '&gt;': '>',
+    '&quot;': '"',
+    '&#39;': "'"
+  };
+
+  /** Used to escape characters for inclusion in compiled string literals. */
+  var stringEscapes = {
+    '\\': '\\',
+    "'": "'",
+    '\n': 'n',
+    '\r': 'r',
+    '\u2028': 'u2028',
+    '\u2029': 'u2029'
+  };
+
+  /** Built-in method references without a dependency on `root`. */
+  var freeParseFloat = parseFloat,
+      freeParseInt = parseInt;
+
+  /** Detect free variable `global` from Node.js. */
+  var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
+
+  /** Detect free variable `self`. */
+  var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
+
+  /** Used as a reference to the global object. */
+  var root = freeGlobal || freeSelf || Function('return this')();
+
+  /** Detect free variable `exports`. */
+  var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports;
+
+  /** Detect free variable `module`. */
+  var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module;
+
+  /** Detect the popular CommonJS extension `module.exports`. */
+  var moduleExports = freeModule && freeModule.exports === freeExports;
+
+  /** Detect free variable `process` from Node.js. */
+  var freeProcess = moduleExports && freeGlobal.process;
+
+  /** Used to access faster Node.js helpers.
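+   *
+   * When available, `util.types` supplies fast native brand checks, e.g.
+   * (illustrative) nodeUtil.isDate(new Date()) => true; outside Node.js,
+   * `nodeUtil` is undefined and the slower fallback checks apply.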
*/ + var nodeUtil = (function() { + try { + // Use `util.types` for Node.js 10+. + var types = freeModule && freeModule.require && freeModule.require('util').types; + + if (types) { + return types; + } + + // Legacy `process.binding('util')` for Node.js < 10. + return freeProcess && freeProcess.binding && freeProcess.binding('util'); + } catch (e) {} + }()); + + /* Node.js helper references. */ + var nodeIsArrayBuffer = nodeUtil && nodeUtil.isArrayBuffer, + nodeIsDate = nodeUtil && nodeUtil.isDate, + nodeIsMap = nodeUtil && nodeUtil.isMap, + nodeIsRegExp = nodeUtil && nodeUtil.isRegExp, + nodeIsSet = nodeUtil && nodeUtil.isSet, + nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray; + + /*--------------------------------------------------------------------------*/ + + /** + * A faster alternative to `Function#apply`, this function invokes `func` + * with the `this` binding of `thisArg` and the arguments of `args`. + * + * @private + * @param {Function} func The function to invoke. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} args The arguments to invoke `func` with. + * @returns {*} Returns the result of `func`. + */ + function apply(func, thisArg, args) { + switch (args.length) { + case 0: return func.call(thisArg); + case 1: return func.call(thisArg, args[0]); + case 2: return func.call(thisArg, args[0], args[1]); + case 3: return func.call(thisArg, args[0], args[1], args[2]); + } + return func.apply(thisArg, args); + } + + /** + * A specialized version of `baseAggregator` for arrays. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform keys. + * @param {Object} accumulator The initial aggregated object. + * @returns {Function} Returns `accumulator`. + */ + function arrayAggregator(array, setter, iteratee, accumulator) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + var value = array[index]; + setter(accumulator, value, iteratee(value), array); + } + return accumulator; + } + + /** + * A specialized version of `_.forEach` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns `array`. + */ + function arrayEach(array, iteratee) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + if (iteratee(array[index], index, array) === false) { + break; + } + } + return array; + } + + /** + * A specialized version of `_.forEachRight` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns `array`. + */ + function arrayEachRight(array, iteratee) { + var length = array == null ? 0 : array.length; + + while (length--) { + if (iteratee(array[length], length, array) === false) { + break; + } + } + return array; + } + + /** + * A specialized version of `_.every` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false`. 
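+   *
+   * Illustrative:
+   *
+   *   arrayEvery([2, 4, 6], function(n) { return n % 2 == 0; }); // => true
+   *   arrayEvery([2, 3], function(n) { return n % 2 == 0; });    // => false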
+ */ + function arrayEvery(array, predicate) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + if (!predicate(array[index], index, array)) { + return false; + } + } + return true; + } + + /** + * A specialized version of `_.filter` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + */ + function arrayFilter(array, predicate) { + var index = -1, + length = array == null ? 0 : array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (predicate(value, index, array)) { + result[resIndex++] = value; + } + } + return result; + } + + /** + * A specialized version of `_.includes` for arrays without support for + * specifying an index to search from. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ + function arrayIncludes(array, value) { + var length = array == null ? 0 : array.length; + return !!length && baseIndexOf(array, value, 0) > -1; + } + + /** + * This function is like `arrayIncludes` except that it accepts a comparator. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @param {Function} comparator The comparator invoked per element. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ + function arrayIncludesWith(array, value, comparator) { + var index = -1, + length = array == null ? 0 : array.length; + + while (++index < length) { + if (comparator(value, array[index])) { + return true; + } + } + return false; + } + + /** + * A specialized version of `_.map` for arrays without support for iteratee + * shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ + function arrayMap(array, iteratee) { + var index = -1, + length = array == null ? 0 : array.length, + result = Array(length); + + while (++index < length) { + result[index] = iteratee(array[index], index, array); + } + return result; + } + + /** + * Appends the elements of `values` to `array`. + * + * @private + * @param {Array} array The array to modify. + * @param {Array} values The values to append. + * @returns {Array} Returns `array`. + */ + function arrayPush(array, values) { + var index = -1, + length = values.length, + offset = array.length; + + while (++index < length) { + array[offset + index] = values[index]; + } + return array; + } + + /** + * A specialized version of `_.reduce` for arrays without support for + * iteratee shorthands. + * + * @private + * @param {Array} [array] The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @param {boolean} [initAccum] Specify using the first element of `array` as + * the initial value. + * @returns {*} Returns the accumulated value. + */ + function arrayReduce(array, iteratee, accumulator, initAccum) { + var index = -1, + length = array == null ? 
+          0 : array.length;
+
+    if (initAccum && length) {
+      accumulator = array[++index];
+    }
+    while (++index < length) {
+      accumulator = iteratee(accumulator, array[index], index, array);
+    }
+    return accumulator;
+  }
+
+  /**
+   * A specialized version of `_.reduceRight` for arrays without support for
+   * iteratee shorthands.
+   *
+   * @private
+   * @param {Array} [array] The array to iterate over.
+   * @param {Function} iteratee The function invoked per iteration.
+   * @param {*} [accumulator] The initial value.
+   * @param {boolean} [initAccum] Specify using the last element of `array` as
+   *  the initial value.
+   * @returns {*} Returns the accumulated value.
+   */
+  function arrayReduceRight(array, iteratee, accumulator, initAccum) {
+    var length = array == null ? 0 : array.length;
+    if (initAccum && length) {
+      accumulator = array[--length];
+    }
+    while (length--) {
+      accumulator = iteratee(accumulator, array[length], length, array);
+    }
+    return accumulator;
+  }
+
+  /**
+   * A specialized version of `_.some` for arrays without support for iteratee
+   * shorthands.
+   *
+   * @private
+   * @param {Array} [array] The array to iterate over.
+   * @param {Function} predicate The function invoked per iteration.
+   * @returns {boolean} Returns `true` if any element passes the predicate check,
+   *  else `false`.
+   */
+  function arraySome(array, predicate) {
+    var index = -1,
+        length = array == null ? 0 : array.length;
+
+    while (++index < length) {
+      if (predicate(array[index], index, array)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Gets the size of an ASCII `string`.
+   *
+   * @private
+   * @param {string} string The string to inspect.
+   * @returns {number} Returns the string size.
+   */
+  var asciiSize = baseProperty('length');
+
+  /**
+   * Converts an ASCII `string` to an array.
+   *
+   * @private
+   * @param {string} string The string to convert.
+   * @returns {Array} Returns the converted array.
+   */
+  function asciiToArray(string) {
+    return string.split('');
+  }
+
+  /**
+   * Splits an ASCII `string` into an array of its words.
+   *
+   * @private
+   * @param {string} string The string to inspect.
+   * @returns {Array} Returns the words of `string`.
+   */
+  function asciiWords(string) {
+    return string.match(reAsciiWord) || [];
+  }
+
+  /**
+   * The base implementation of methods like `_.findKey` and `_.findLastKey`,
+   * without support for iteratee shorthands, which iterates over `collection`
+   * using `eachFunc`.
+   *
+   * @private
+   * @param {Array|Object} collection The collection to inspect.
+   * @param {Function} predicate The function invoked per iteration.
+   * @param {Function} eachFunc The function to iterate over `collection`.
+   * @returns {*} Returns the found element or its key, else `undefined`.
+   */
+  function baseFindKey(collection, predicate, eachFunc) {
+    var result;
+    eachFunc(collection, function(value, key, collection) {
+      if (predicate(value, key, collection)) {
+        result = key;
+        return false;
+      }
+    });
+    return result;
+  }
+
+  /**
+   * The base implementation of `_.findIndex` and `_.findLastIndex` without
+   * support for iteratee shorthands.
+   *
+   * @private
+   * @param {Array} array The array to inspect.
+   * @param {Function} predicate The function invoked per iteration.
+   * @param {number} fromIndex The index to search from.
+   * @param {boolean} [fromRight] Specify iterating from right to left.
+   * @returns {number} Returns the index of the matched value, else `-1`.
+   */
+  function baseFindIndex(array, predicate, fromIndex, fromRight) {
+    var length = array.length,
+        index = fromIndex + (fromRight ?
1 : -1); + + while ((fromRight ? index-- : ++index < length)) { + if (predicate(array[index], index, array)) { + return index; + } + } + return -1; + } + + /** + * The base implementation of `_.indexOf` without `fromIndex` bounds checks. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseIndexOf(array, value, fromIndex) { + return value === value + ? strictIndexOf(array, value, fromIndex) + : baseFindIndex(array, baseIsNaN, fromIndex); + } + + /** + * This function is like `baseIndexOf` except that it accepts a comparator. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @param {Function} comparator The comparator invoked per element. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function baseIndexOfWith(array, value, fromIndex, comparator) { + var index = fromIndex - 1, + length = array.length; + + while (++index < length) { + if (comparator(array[index], value)) { + return index; + } + } + return -1; + } + + /** + * The base implementation of `_.isNaN` without support for number objects. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + */ + function baseIsNaN(value) { + return value !== value; + } + + /** + * The base implementation of `_.mean` and `_.meanBy` without support for + * iteratee shorthands. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {number} Returns the mean. + */ + function baseMean(array, iteratee) { + var length = array == null ? 0 : array.length; + return length ? (baseSum(array, iteratee) / length) : NAN; + } + + /** + * The base implementation of `_.property` without support for deep paths. + * + * @private + * @param {string} key The key of the property to get. + * @returns {Function} Returns the new accessor function. + */ + function baseProperty(key) { + return function(object) { + return object == null ? undefined : object[key]; + }; + } + + /** + * The base implementation of `_.propertyOf` without support for deep paths. + * + * @private + * @param {Object} object The object to query. + * @returns {Function} Returns the new accessor function. + */ + function basePropertyOf(object) { + return function(key) { + return object == null ? undefined : object[key]; + }; + } + + /** + * The base implementation of `_.reduce` and `_.reduceRight`, without support + * for iteratee shorthands, which iterates over `collection` using `eachFunc`. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {*} accumulator The initial value. + * @param {boolean} initAccum Specify using the first or last element of + * `collection` as the initial value. + * @param {Function} eachFunc The function to iterate over `collection`. + * @returns {*} Returns the accumulated value. + */ + function baseReduce(collection, iteratee, accumulator, initAccum, eachFunc) { + eachFunc(collection, function(value, index, collection) { + accumulator = initAccum + ? 
(initAccum = false, value) + : iteratee(accumulator, value, index, collection); + }); + return accumulator; + } + + /** + * The base implementation of `_.sortBy` which uses `comparer` to define the + * sort order of `array` and replaces criteria objects with their corresponding + * values. + * + * @private + * @param {Array} array The array to sort. + * @param {Function} comparer The function to define sort order. + * @returns {Array} Returns `array`. + */ + function baseSortBy(array, comparer) { + var length = array.length; + + array.sort(comparer); + while (length--) { + array[length] = array[length].value; + } + return array; + } + + /** + * The base implementation of `_.sum` and `_.sumBy` without support for + * iteratee shorthands. + * + * @private + * @param {Array} array The array to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {number} Returns the sum. + */ + function baseSum(array, iteratee) { + var result, + index = -1, + length = array.length; + + while (++index < length) { + var current = iteratee(array[index]); + if (current !== undefined) { + result = result === undefined ? current : (result + current); + } + } + return result; + } + + /** + * The base implementation of `_.times` without support for iteratee shorthands + * or max array length checks. + * + * @private + * @param {number} n The number of times to invoke `iteratee`. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the array of results. + */ + function baseTimes(n, iteratee) { + var index = -1, + result = Array(n); + + while (++index < n) { + result[index] = iteratee(index); + } + return result; + } + + /** + * The base implementation of `_.toPairs` and `_.toPairsIn` which creates an array + * of key-value pairs for `object` corresponding to the property names of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the key-value pairs. + */ + function baseToPairs(object, props) { + return arrayMap(props, function(key) { + return [key, object[key]]; + }); + } + + /** + * The base implementation of `_.trim`. + * + * @private + * @param {string} string The string to trim. + * @returns {string} Returns the trimmed string. + */ + function baseTrim(string) { + return string + ? string.slice(0, trimmedEndIndex(string) + 1).replace(reTrimStart, '') + : string; + } + + /** + * The base implementation of `_.unary` without support for storing metadata. + * + * @private + * @param {Function} func The function to cap arguments for. + * @returns {Function} Returns the new capped function. + */ + function baseUnary(func) { + return function(value) { + return func(value); + }; + } + + /** + * The base implementation of `_.values` and `_.valuesIn` which creates an + * array of `object` property values corresponding to the property names + * of `props`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} props The property names to get values for. + * @returns {Object} Returns the array of property values. + */ + function baseValues(object, props) { + return arrayMap(props, function(key) { + return object[key]; + }); + } + + /** + * Checks if a `cache` value for `key` exists. + * + * @private + * @param {Object} cache The cache to query. + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. 
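+   *
+   * Illustrative (`SetCache` is defined later in this file):
+   *
+   *   cacheHas(new SetCache([1, 2]), 1); // => true
+   *   cacheHas(new SetCache([1, 2]), 3); // => false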
+ */ + function cacheHas(cache, key) { + return cache.has(key); + } + + /** + * Used by `_.trim` and `_.trimStart` to get the index of the first string symbol + * that is not found in the character symbols. + * + * @private + * @param {Array} strSymbols The string symbols to inspect. + * @param {Array} chrSymbols The character symbols to find. + * @returns {number} Returns the index of the first unmatched string symbol. + */ + function charsStartIndex(strSymbols, chrSymbols) { + var index = -1, + length = strSymbols.length; + + while (++index < length && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} + return index; + } + + /** + * Used by `_.trim` and `_.trimEnd` to get the index of the last string symbol + * that is not found in the character symbols. + * + * @private + * @param {Array} strSymbols The string symbols to inspect. + * @param {Array} chrSymbols The character symbols to find. + * @returns {number} Returns the index of the last unmatched string symbol. + */ + function charsEndIndex(strSymbols, chrSymbols) { + var index = strSymbols.length; + + while (index-- && baseIndexOf(chrSymbols, strSymbols[index], 0) > -1) {} + return index; + } + + /** + * Gets the number of `placeholder` occurrences in `array`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} placeholder The placeholder to search for. + * @returns {number} Returns the placeholder count. + */ + function countHolders(array, placeholder) { + var length = array.length, + result = 0; + + while (length--) { + if (array[length] === placeholder) { + ++result; + } + } + return result; + } + + /** + * Used by `_.deburr` to convert Latin-1 Supplement and Latin Extended-A + * letters to basic Latin letters. + * + * @private + * @param {string} letter The matched letter to deburr. + * @returns {string} Returns the deburred letter. + */ + var deburrLetter = basePropertyOf(deburredLetters); + + /** + * Used by `_.escape` to convert characters to HTML entities. + * + * @private + * @param {string} chr The matched character to escape. + * @returns {string} Returns the escaped character. + */ + var escapeHtmlChar = basePropertyOf(htmlEscapes); + + /** + * Used by `_.template` to escape characters for inclusion in compiled string literals. + * + * @private + * @param {string} chr The matched character to escape. + * @returns {string} Returns the escaped character. + */ + function escapeStringChar(chr) { + return '\\' + stringEscapes[chr]; + } + + /** + * Gets the value at `key` of `object`. + * + * @private + * @param {Object} [object] The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ + function getValue(object, key) { + return object == null ? undefined : object[key]; + } + + /** + * Checks if `string` contains Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a symbol is found, else `false`. + */ + function hasUnicode(string) { + return reHasUnicode.test(string); + } + + /** + * Checks if `string` contains a word composed of Unicode symbols. + * + * @private + * @param {string} string The string to inspect. + * @returns {boolean} Returns `true` if a word is found, else `false`. + */ + function hasUnicodeWord(string) { + return reHasUnicodeWord.test(string); + } + + /** + * Converts `iterator` to an array. + * + * @private + * @param {Object} iterator The iterator to convert. + * @returns {Array} Returns the converted array. 
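+   *
+   * Illustrative (assumes an ES2015 environment with `Symbol.iterator`):
+   *
+   *   iteratorToArray([1, 2][Symbol.iterator]()); // => [1, 2]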
+ */ + function iteratorToArray(iterator) { + var data, + result = []; + + while (!(data = iterator.next()).done) { + result.push(data.value); + } + return result; + } + + /** + * Converts `map` to its key-value pairs. + * + * @private + * @param {Object} map The map to convert. + * @returns {Array} Returns the key-value pairs. + */ + function mapToArray(map) { + var index = -1, + result = Array(map.size); + + map.forEach(function(value, key) { + result[++index] = [key, value]; + }); + return result; + } + + /** + * Creates a unary function that invokes `func` with its argument transformed. + * + * @private + * @param {Function} func The function to wrap. + * @param {Function} transform The argument transform. + * @returns {Function} Returns the new function. + */ + function overArg(func, transform) { + return function(arg) { + return func(transform(arg)); + }; + } + + /** + * Replaces all `placeholder` elements in `array` with an internal placeholder + * and returns an array of their indexes. + * + * @private + * @param {Array} array The array to modify. + * @param {*} placeholder The placeholder to replace. + * @returns {Array} Returns the new array of placeholder indexes. + */ + function replaceHolders(array, placeholder) { + var index = -1, + length = array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (value === placeholder || value === PLACEHOLDER) { + array[index] = PLACEHOLDER; + result[resIndex++] = index; + } + } + return result; + } + + /** + * Converts `set` to an array of its values. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the values. + */ + function setToArray(set) { + var index = -1, + result = Array(set.size); + + set.forEach(function(value) { + result[++index] = value; + }); + return result; + } + + /** + * Converts `set` to its value-value pairs. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the value-value pairs. + */ + function setToPairs(set) { + var index = -1, + result = Array(set.size); + + set.forEach(function(value) { + result[++index] = [value, value]; + }); + return result; + } + + /** + * A specialized version of `_.indexOf` which performs strict equality + * comparisons of values, i.e. `===`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function strictIndexOf(array, value, fromIndex) { + var index = fromIndex - 1, + length = array.length; + + while (++index < length) { + if (array[index] === value) { + return index; + } + } + return -1; + } + + /** + * A specialized version of `_.lastIndexOf` which performs strict equality + * comparisons of values, i.e. `===`. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function strictLastIndexOf(array, value, fromIndex) { + var index = fromIndex + 1; + while (index--) { + if (array[index] === value) { + return index; + } + } + return index; + } + + /** + * Gets the number of symbols in `string`. + * + * @private + * @param {string} string The string to inspect. + * @returns {number} Returns the string size. + */ + function stringSize(string) { + return hasUnicode(string) + ? 
+      unicodeSize(string)
+      : asciiSize(string);
+  }
+
+  /**
+   * Converts `string` to an array.
+   *
+   * @private
+   * @param {string} string The string to convert.
+   * @returns {Array} Returns the converted array.
+   */
+  function stringToArray(string) {
+    return hasUnicode(string)
+      ? unicodeToArray(string)
+      : asciiToArray(string);
+  }
+
+  /**
+   * Used by `_.trim` and `_.trimEnd` to get the index of the last non-whitespace
+   * character of `string`.
+   *
+   * @private
+   * @param {string} string The string to inspect.
+   * @returns {number} Returns the index of the last non-whitespace character.
+   */
+  function trimmedEndIndex(string) {
+    var index = string.length;
+
+    while (index-- && reWhitespace.test(string.charAt(index))) {}
+    return index;
+  }
+
+  /**
+   * Used by `_.unescape` to convert HTML entities to characters.
+   *
+   * @private
+   * @param {string} chr The matched character to unescape.
+   * @returns {string} Returns the unescaped character.
+   */
+  var unescapeHtmlChar = basePropertyOf(htmlUnescapes);
+
+  /**
+   * Gets the size of a Unicode `string`.
+   *
+   * @private
+   * @param {string} string The string to inspect.
+   * @returns {number} Returns the string size.
+   */
+  function unicodeSize(string) {
+    var result = reUnicode.lastIndex = 0;
+    while (reUnicode.test(string)) {
+      ++result;
+    }
+    return result;
+  }
+
+  /**
+   * Converts a Unicode `string` to an array.
+   *
+   * @private
+   * @param {string} string The string to convert.
+   * @returns {Array} Returns the converted array.
+   */
+  function unicodeToArray(string) {
+    return string.match(reUnicode) || [];
+  }
+
+  /**
+   * Splits a Unicode `string` into an array of its words.
+   *
+   * @private
+   * @param {string} string The string to inspect.
+   * @returns {Array} Returns the words of `string`.
+   */
+  function unicodeWords(string) {
+    return string.match(reUnicodeWord) || [];
+  }
+
+  /*--------------------------------------------------------------------------*/
+
+  /**
+   * Create a new pristine `lodash` function using the `context` object.
+   *
+   * @static
+   * @memberOf _
+   * @since 1.1.0
+   * @category Util
+   * @param {Object} [context=root] The context object.
+   * @returns {Function} Returns a new `lodash` function.
+   * @example
+   *
+   * _.mixin({ 'foo': _.constant('foo') });
+   *
+   * var lodash = _.runInContext();
+   * lodash.mixin({ 'bar': lodash.constant('bar') });
+   *
+   * _.isFunction(_.foo);
+   * // => true
+   * _.isFunction(_.bar);
+   * // => false
+   *
+   * lodash.isFunction(lodash.foo);
+   * // => false
+   * lodash.isFunction(lodash.bar);
+   * // => true
+   *
+   * // Create a souped-up `defer` in Node.js.
+   * var defer = _.runInContext({ 'setTimeout': setImmediate }).defer;
+   */
+  var runInContext = (function runInContext(context) {
+    context = context == null ? root : _.defaults(root.Object(), context, _.pick(root, contextProps));
+
+    /** Built-in constructor references. */
+    var Array = context.Array,
+        Date = context.Date,
+        Error = context.Error,
+        Function = context.Function,
+        Math = context.Math,
+        Object = context.Object,
+        RegExp = context.RegExp,
+        String = context.String,
+        TypeError = context.TypeError;
+
+    /** Used for built-in method references. */
+    var arrayProto = Array.prototype,
+        funcProto = Function.prototype,
+        objectProto = Object.prototype;
+
+    /** Used to detect overreaching core-js shims. */
+    var coreJsData = context['__core-js_shared__'];
+
+    /** Used to resolve the decompiled source of functions. */
+    var funcToString = funcProto.toString;
+
+    /** Used to check objects for own properties.
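+     *
+     * Illustrative: hasOwnProperty.call({ 'a': 1 }, 'a') => true, whereas an
+     * inherited key such as 'toString' => false.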
*/ + var hasOwnProperty = objectProto.hasOwnProperty; + + /** Used to generate unique IDs. */ + var idCounter = 0; + + /** Used to detect methods masquerading as native. */ + var maskSrcKey = (function() { + var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); + return uid ? ('Symbol(src)_1.' + uid) : ''; + }()); + + /** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ + var nativeObjectToString = objectProto.toString; + + /** Used to infer the `Object` constructor. */ + var objectCtorString = funcToString.call(Object); + + /** Used to restore the original `_` reference in `_.noConflict`. */ + var oldDash = root._; + + /** Used to detect if a method is native. */ + var reIsNative = RegExp('^' + + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') + .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' + ); + + /** Built-in value references. */ + var Buffer = moduleExports ? context.Buffer : undefined, + Symbol = context.Symbol, + Uint8Array = context.Uint8Array, + allocUnsafe = Buffer ? Buffer.allocUnsafe : undefined, + getPrototype = overArg(Object.getPrototypeOf, Object), + objectCreate = Object.create, + propertyIsEnumerable = objectProto.propertyIsEnumerable, + splice = arrayProto.splice, + spreadableSymbol = Symbol ? Symbol.isConcatSpreadable : undefined, + symIterator = Symbol ? Symbol.iterator : undefined, + symToStringTag = Symbol ? Symbol.toStringTag : undefined; + + var defineProperty = (function() { + try { + var func = getNative(Object, 'defineProperty'); + func({}, '', {}); + return func; + } catch (e) {} + }()); + + /** Mocked built-ins. */ + var ctxClearTimeout = context.clearTimeout !== root.clearTimeout && context.clearTimeout, + ctxNow = Date && Date.now !== root.Date.now && Date.now, + ctxSetTimeout = context.setTimeout !== root.setTimeout && context.setTimeout; + + /* Built-in method references for those with the same name as other `lodash` methods. */ + var nativeCeil = Math.ceil, + nativeFloor = Math.floor, + nativeGetSymbols = Object.getOwnPropertySymbols, + nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined, + nativeIsFinite = context.isFinite, + nativeJoin = arrayProto.join, + nativeKeys = overArg(Object.keys, Object), + nativeMax = Math.max, + nativeMin = Math.min, + nativeNow = Date.now, + nativeParseInt = context.parseInt, + nativeRandom = Math.random, + nativeReverse = arrayProto.reverse; + + /* Built-in method references that are verified to be native. */ + var DataView = getNative(context, 'DataView'), + Map = getNative(context, 'Map'), + Promise = getNative(context, 'Promise'), + Set = getNative(context, 'Set'), + WeakMap = getNative(context, 'WeakMap'), + nativeCreate = getNative(Object, 'create'); + + /** Used to store function metadata. */ + var metaMap = WeakMap && new WeakMap; + + /** Used to lookup unminified function names. */ + var realNames = {}; + + /** Used to detect maps, sets, and weakmaps. */ + var dataViewCtorString = toSource(DataView), + mapCtorString = toSource(Map), + promiseCtorString = toSource(Promise), + setCtorString = toSource(Set), + weakMapCtorString = toSource(WeakMap); + + /** Used to convert symbols to primitives and strings. */ + var symbolProto = Symbol ? Symbol.prototype : undefined, + symbolValueOf = symbolProto ? symbolProto.valueOf : undefined, + symbolToString = symbolProto ? 
symbolProto.toString : undefined; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a `lodash` object which wraps `value` to enable implicit method + * chain sequences. Methods that operate on and return arrays, collections, + * and functions can be chained together. Methods that retrieve a single value + * or may return a primitive value will automatically end the chain sequence + * and return the unwrapped value. Otherwise, the value must be unwrapped + * with `_#value`. + * + * Explicit chain sequences, which must be unwrapped with `_#value`, may be + * enabled using `_.chain`. + * + * The execution of chained methods is lazy, that is, it's deferred until + * `_#value` is implicitly or explicitly called. + * + * Lazy evaluation allows several methods to support shortcut fusion. + * Shortcut fusion is an optimization to merge iteratee calls; this avoids + * the creation of intermediate arrays and can greatly reduce the number of + * iteratee executions. Sections of a chain sequence qualify for shortcut + * fusion if the section is applied to an array and iteratees accept only + * one argument. The heuristic for whether a section qualifies for shortcut + * fusion is subject to change. + * + * Chaining is supported in custom builds as long as the `_#value` method is + * directly or indirectly included in the build. + * + * In addition to lodash methods, wrappers have `Array` and `String` methods. + * + * The wrapper `Array` methods are: + * `concat`, `join`, `pop`, `push`, `shift`, `sort`, `splice`, and `unshift` + * + * The wrapper `String` methods are: + * `replace` and `split` + * + * The wrapper methods that support shortcut fusion are: + * `at`, `compact`, `drop`, `dropRight`, `dropWhile`, `filter`, `find`, + * `findLast`, `head`, `initial`, `last`, `map`, `reject`, `reverse`, `slice`, + * `tail`, `take`, `takeRight`, `takeRightWhile`, `takeWhile`, and `toArray` + * + * The chainable wrapper methods are: + * `after`, `ary`, `assign`, `assignIn`, `assignInWith`, `assignWith`, `at`, + * `before`, `bind`, `bindAll`, `bindKey`, `castArray`, `chain`, `chunk`, + * `commit`, `compact`, `concat`, `conforms`, `constant`, `countBy`, `create`, + * `curry`, `debounce`, `defaults`, `defaultsDeep`, `defer`, `delay`, + * `difference`, `differenceBy`, `differenceWith`, `drop`, `dropRight`, + * `dropRightWhile`, `dropWhile`, `extend`, `extendWith`, `fill`, `filter`, + * `flatMap`, `flatMapDeep`, `flatMapDepth`, `flatten`, `flattenDeep`, + * `flattenDepth`, `flip`, `flow`, `flowRight`, `fromPairs`, `functions`, + * `functionsIn`, `groupBy`, `initial`, `intersection`, `intersectionBy`, + * `intersectionWith`, `invert`, `invertBy`, `invokeMap`, `iteratee`, `keyBy`, + * `keys`, `keysIn`, `map`, `mapKeys`, `mapValues`, `matches`, `matchesProperty`, + * `memoize`, `merge`, `mergeWith`, `method`, `methodOf`, `mixin`, `negate`, + * `nthArg`, `omit`, `omitBy`, `once`, `orderBy`, `over`, `overArgs`, + * `overEvery`, `overSome`, `partial`, `partialRight`, `partition`, `pick`, + * `pickBy`, `plant`, `property`, `propertyOf`, `pull`, `pullAll`, `pullAllBy`, + * `pullAllWith`, `pullAt`, `push`, `range`, `rangeRight`, `rearg`, `reject`, + * `remove`, `rest`, `reverse`, `sampleSize`, `set`, `setWith`, `shuffle`, + * `slice`, `sort`, `sortBy`, `splice`, `spread`, `tail`, `take`, `takeRight`, + * `takeRightWhile`, `takeWhile`, `tap`, `throttle`, `thru`, `toArray`, + * `toPairs`, `toPairsIn`, `toPath`, `toPlainObject`, `transform`, `unary`, + * `union`, `unionBy`, 
`unionWith`, `uniq`, `uniqBy`, `uniqWith`, `unset`, + * `unshift`, `unzip`, `unzipWith`, `update`, `updateWith`, `values`, + * `valuesIn`, `without`, `wrap`, `xor`, `xorBy`, `xorWith`, `zip`, + * `zipObject`, `zipObjectDeep`, and `zipWith` + * + * The wrapper methods that are **not** chainable by default are: + * `add`, `attempt`, `camelCase`, `capitalize`, `ceil`, `clamp`, `clone`, + * `cloneDeep`, `cloneDeepWith`, `cloneWith`, `conformsTo`, `deburr`, + * `defaultTo`, `divide`, `each`, `eachRight`, `endsWith`, `eq`, `escape`, + * `escapeRegExp`, `every`, `find`, `findIndex`, `findKey`, `findLast`, + * `findLastIndex`, `findLastKey`, `first`, `floor`, `forEach`, `forEachRight`, + * `forIn`, `forInRight`, `forOwn`, `forOwnRight`, `get`, `gt`, `gte`, `has`, + * `hasIn`, `head`, `identity`, `includes`, `indexOf`, `inRange`, `invoke`, + * `isArguments`, `isArray`, `isArrayBuffer`, `isArrayLike`, `isArrayLikeObject`, + * `isBoolean`, `isBuffer`, `isDate`, `isElement`, `isEmpty`, `isEqual`, + * `isEqualWith`, `isError`, `isFinite`, `isFunction`, `isInteger`, `isLength`, + * `isMap`, `isMatch`, `isMatchWith`, `isNaN`, `isNative`, `isNil`, `isNull`, + * `isNumber`, `isObject`, `isObjectLike`, `isPlainObject`, `isRegExp`, + * `isSafeInteger`, `isSet`, `isString`, `isUndefined`, `isTypedArray`, + * `isWeakMap`, `isWeakSet`, `join`, `kebabCase`, `last`, `lastIndexOf`, + * `lowerCase`, `lowerFirst`, `lt`, `lte`, `max`, `maxBy`, `mean`, `meanBy`, + * `min`, `minBy`, `multiply`, `noConflict`, `noop`, `now`, `nth`, `pad`, + * `padEnd`, `padStart`, `parseInt`, `pop`, `random`, `reduce`, `reduceRight`, + * `repeat`, `result`, `round`, `runInContext`, `sample`, `shift`, `size`, + * `snakeCase`, `some`, `sortedIndex`, `sortedIndexBy`, `sortedLastIndex`, + * `sortedLastIndexBy`, `startCase`, `startsWith`, `stubArray`, `stubFalse`, + * `stubObject`, `stubString`, `stubTrue`, `subtract`, `sum`, `sumBy`, + * `template`, `times`, `toFinite`, `toInteger`, `toJSON`, `toLength`, + * `toLower`, `toNumber`, `toSafeInteger`, `toString`, `toUpper`, `trim`, + * `trimEnd`, `trimStart`, `truncate`, `unescape`, `uniqueId`, `upperCase`, + * `upperFirst`, `value`, and `words` + * + * @name _ + * @constructor + * @category Seq + * @param {*} value The value to wrap in a `lodash` instance. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * function square(n) { + * return n * n; + * } + * + * var wrapped = _([1, 2, 3]); + * + * // Returns an unwrapped value. + * wrapped.reduce(_.add); + * // => 6 + * + * // Returns a wrapped value. + * var squares = wrapped.map(square); + * + * _.isArray(squares); + * // => false + * + * _.isArray(squares.value()); + * // => true + */ + function lodash(value) { + if (isObjectLike(value) && !isArray(value) && !(value instanceof LazyWrapper)) { + if (value instanceof LodashWrapper) { + return value; + } + if (hasOwnProperty.call(value, '__wrapped__')) { + return wrapperClone(value); + } + } + return new LodashWrapper(value); + } + + /** + * The base implementation of `_.create` without support for assigning + * properties to the created object. + * + * @private + * @param {Object} proto The object to inherit from. + * @returns {Object} Returns the new object. 
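+     *
+     * Illustrative:
+     *
+     *   var proto = { 'greet': function() { return 'hi'; } };
+     *   baseCreate(proto).greet(); // => 'hi' (inherited from `proto`)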
+ */ + var baseCreate = (function() { + function object() {} + return function(proto) { + if (!isObject(proto)) { + return {}; + } + if (objectCreate) { + return objectCreate(proto); + } + object.prototype = proto; + var result = new object; + object.prototype = undefined; + return result; + }; + }()); + + /** + * The function whose prototype chain sequence wrappers inherit from. + * + * @private + */ + function baseLodash() { + // No operation performed. + } + + /** + * The base constructor for creating `lodash` wrapper objects. + * + * @private + * @param {*} value The value to wrap. + * @param {boolean} [chainAll] Enable explicit method chain sequences. + */ + function LodashWrapper(value, chainAll) { + this.__wrapped__ = value; + this.__actions__ = []; + this.__chain__ = !!chainAll; + this.__index__ = 0; + this.__values__ = undefined; + } + + /** + * By default, the template delimiters used by lodash are like those in + * embedded Ruby (ERB) as well as ES2015 template strings. Change the + * following template settings to use alternative delimiters. + * + * @static + * @memberOf _ + * @type {Object} + */ + lodash.templateSettings = { + + /** + * Used to detect `data` property values to be HTML-escaped. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'escape': reEscape, + + /** + * Used to detect code to be evaluated. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'evaluate': reEvaluate, + + /** + * Used to detect `data` property values to inject. + * + * @memberOf _.templateSettings + * @type {RegExp} + */ + 'interpolate': reInterpolate, + + /** + * Used to reference the data object in the template text. + * + * @memberOf _.templateSettings + * @type {string} + */ + 'variable': '', + + /** + * Used to import variables into the compiled template. + * + * @memberOf _.templateSettings + * @type {Object} + */ + 'imports': { + + /** + * A reference to the `lodash` function. + * + * @memberOf _.templateSettings.imports + * @type {Function} + */ + '_': lodash + } + }; + + // Ensure wrappers are instances of `baseLodash`. + lodash.prototype = baseLodash.prototype; + lodash.prototype.constructor = lodash; + + LodashWrapper.prototype = baseCreate(baseLodash.prototype); + LodashWrapper.prototype.constructor = LodashWrapper; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a lazy wrapper object which wraps `value` to enable lazy evaluation. + * + * @private + * @constructor + * @param {*} value The value to wrap. + */ + function LazyWrapper(value) { + this.__wrapped__ = value; + this.__actions__ = []; + this.__dir__ = 1; + this.__filtered__ = false; + this.__iteratees__ = []; + this.__takeCount__ = MAX_ARRAY_LENGTH; + this.__views__ = []; + } + + /** + * Creates a clone of the lazy wrapper object. + * + * @private + * @name clone + * @memberOf LazyWrapper + * @returns {Object} Returns the cloned `LazyWrapper` object. + */ + function lazyClone() { + var result = new LazyWrapper(this.__wrapped__); + result.__actions__ = copyArray(this.__actions__); + result.__dir__ = this.__dir__; + result.__filtered__ = this.__filtered__; + result.__iteratees__ = copyArray(this.__iteratees__); + result.__takeCount__ = this.__takeCount__; + result.__views__ = copyArray(this.__views__); + return result; + } + + /** + * Reverses the direction of lazy iteration. + * + * @private + * @name reverse + * @memberOf LazyWrapper + * @returns {Object} Returns the new reversed `LazyWrapper` object. 
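+     *
+     * Conceptually (illustrative): an unfiltered sequence is cloned with its
+     * `__dir__` negated so `lazyValue` iterates from the other end; filtered
+     * sequences are wrapped instead, since their element order is only known
+     * at iteration time.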
+     */
+    function lazyReverse() {
+      if (this.__filtered__) {
+        var result = new LazyWrapper(this);
+        result.__dir__ = -1;
+        result.__filtered__ = true;
+      } else {
+        result = this.clone();
+        result.__dir__ *= -1;
+      }
+      return result;
+    }
+
+    /**
+     * Extracts the unwrapped value from its lazy wrapper.
+     *
+     * @private
+     * @name value
+     * @memberOf LazyWrapper
+     * @returns {*} Returns the unwrapped value.
+     */
+    function lazyValue() {
+      var array = this.__wrapped__.value(),
+          dir = this.__dir__,
+          isArr = isArray(array),
+          isRight = dir < 0,
+          arrLength = isArr ? array.length : 0,
+          view = getView(0, arrLength, this.__views__),
+          start = view.start,
+          end = view.end,
+          length = end - start,
+          index = isRight ? end : (start - 1),
+          iteratees = this.__iteratees__,
+          iterLength = iteratees.length,
+          resIndex = 0,
+          takeCount = nativeMin(length, this.__takeCount__);
+
+      if (!isArr || (!isRight && arrLength == length && takeCount == length)) {
+        return baseWrapperValue(array, this.__actions__);
+      }
+      var result = [];
+
+      outer:
+      while (length-- && resIndex < takeCount) {
+        index += dir;
+
+        var iterIndex = -1,
+            value = array[index];
+
+        while (++iterIndex < iterLength) {
+          var data = iteratees[iterIndex],
+              iteratee = data.iteratee,
+              type = data.type,
+              computed = iteratee(value);
+
+          if (type == LAZY_MAP_FLAG) {
+            value = computed;
+          } else if (!computed) {
+            if (type == LAZY_FILTER_FLAG) {
+              continue outer;
+            } else {
+              break outer;
+            }
+          }
+        }
+        result[resIndex++] = value;
+      }
+      return result;
+    }
+
+    // Ensure `LazyWrapper` is an instance of `baseLodash`.
+    LazyWrapper.prototype = baseCreate(baseLodash.prototype);
+    LazyWrapper.prototype.constructor = LazyWrapper;
+
+    /*------------------------------------------------------------------------*/
+
+    /**
+     * Creates a hash object.
+     *
+     * @private
+     * @constructor
+     * @param {Array} [entries] The key-value pairs to cache.
+     */
+    function Hash(entries) {
+      var index = -1,
+          length = entries == null ? 0 : entries.length;
+
+      this.clear();
+      while (++index < length) {
+        var entry = entries[index];
+        this.set(entry[0], entry[1]);
+      }
+    }
+
+    /**
+     * Removes all key-value entries from the hash.
+     *
+     * @private
+     * @name clear
+     * @memberOf Hash
+     */
+    function hashClear() {
+      this.__data__ = nativeCreate ? nativeCreate(null) : {};
+      this.size = 0;
+    }
+
+    /**
+     * Removes `key` and its value from the hash.
+     *
+     * @private
+     * @name delete
+     * @memberOf Hash
+     * @param {string} key The key of the value to remove.
+     * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+     */
+    function hashDelete(key) {
+      var result = this.has(key) && delete this.__data__[key];
+      this.size -= result ? 1 : 0;
+      return result;
+    }
+
+    /**
+     * Gets the hash value for `key`.
+     *
+     * @private
+     * @name get
+     * @memberOf Hash
+     * @param {string} key The key of the value to get.
+     * @returns {*} Returns the entry value.
+     */
+    function hashGet(key) {
+      var data = this.__data__;
+      if (nativeCreate) {
+        var result = data[key];
+        return result === HASH_UNDEFINED ? undefined : result;
+      }
+      return hasOwnProperty.call(data, key) ? data[key] : undefined;
+    }
+
+    /**
+     * Checks if a hash value for `key` exists.
+     *
+     * @private
+     * @name has
+     * @memberOf Hash
+     * @param {string} key The key of the entry to check.
+     * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+     */
+    function hashHas(key) {
+      var data = this.__data__;
+      return nativeCreate ?
(data[key] !== undefined) : hasOwnProperty.call(data, key); + } + + /** + * Sets the hash `key` to `value`. + * + * @private + * @name set + * @memberOf Hash + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the hash instance. + */ + function hashSet(key, value) { + var data = this.__data__; + this.size += this.has(key) ? 0 : 1; + data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; + return this; + } + + // Add methods to `Hash`. + Hash.prototype.clear = hashClear; + Hash.prototype['delete'] = hashDelete; + Hash.prototype.get = hashGet; + Hash.prototype.has = hashHas; + Hash.prototype.set = hashSet; + + /*------------------------------------------------------------------------*/ + + /** + * Creates an list cache object. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function ListCache(entries) { + var index = -1, + length = entries == null ? 0 : entries.length; + + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); + } + } + + /** + * Removes all key-value entries from the list cache. + * + * @private + * @name clear + * @memberOf ListCache + */ + function listCacheClear() { + this.__data__ = []; + this.size = 0; + } + + /** + * Removes `key` and its value from the list cache. + * + * @private + * @name delete + * @memberOf ListCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ + function listCacheDelete(key) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + return false; + } + var lastIndex = data.length - 1; + if (index == lastIndex) { + data.pop(); + } else { + splice.call(data, index, 1); + } + --this.size; + return true; + } + + /** + * Gets the list cache value for `key`. + * + * @private + * @name get + * @memberOf ListCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function listCacheGet(key) { + var data = this.__data__, + index = assocIndexOf(data, key); + + return index < 0 ? undefined : data[index][1]; + } + + /** + * Checks if a list cache value for `key` exists. + * + * @private + * @name has + * @memberOf ListCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function listCacheHas(key) { + return assocIndexOf(this.__data__, key) > -1; + } + + /** + * Sets the list cache `key` to `value`. + * + * @private + * @name set + * @memberOf ListCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the list cache instance. + */ + function listCacheSet(key, value) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + ++this.size; + data.push([key, value]); + } else { + data[index][1] = value; + } + return this; + } + + // Add methods to `ListCache`. + ListCache.prototype.clear = listCacheClear; + ListCache.prototype['delete'] = listCacheDelete; + ListCache.prototype.get = listCacheGet; + ListCache.prototype.has = listCacheHas; + ListCache.prototype.set = listCacheSet; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a map cache object to store key-value pairs. 
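+     * Keys are routed by type to one of three backing stores (see
+     * `getMapData`), e.g. (illustrative) new MapCache([['a', 1]]).get('a')
+     * returns 1 via the 'string' hash.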
+ *
+ * @private
+ * @constructor
+ * @param {Array} [entries] The key-value pairs to cache.
+ */
+ function MapCache(entries) {
+   var index = -1,
+       length = entries == null ? 0 : entries.length;
+
+   this.clear();
+   while (++index < length) {
+     var entry = entries[index];
+     this.set(entry[0], entry[1]);
+   }
+ }
+
+ /**
+ * Removes all key-value entries from the map.
+ *
+ * @private
+ * @name clear
+ * @memberOf MapCache
+ */
+ function mapCacheClear() {
+   this.size = 0;
+   this.__data__ = {
+     'hash': new Hash,
+     'map': new (Map || ListCache),
+     'string': new Hash
+   };
+ }
+
+ /**
+ * Removes `key` and its value from the map.
+ *
+ * @private
+ * @name delete
+ * @memberOf MapCache
+ * @param {string} key The key of the value to remove.
+ * @returns {boolean} Returns `true` if the entry was removed, else `false`.
+ */
+ function mapCacheDelete(key) {
+   var result = getMapData(this, key)['delete'](key);
+   this.size -= result ? 1 : 0;
+   return result;
+ }
+
+ /**
+ * Gets the map value for `key`.
+ *
+ * @private
+ * @name get
+ * @memberOf MapCache
+ * @param {string} key The key of the value to get.
+ * @returns {*} Returns the entry value.
+ */
+ function mapCacheGet(key) {
+   return getMapData(this, key).get(key);
+ }
+
+ /**
+ * Checks if a map value for `key` exists.
+ *
+ * @private
+ * @name has
+ * @memberOf MapCache
+ * @param {string} key The key of the entry to check.
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
+ */
+ function mapCacheHas(key) {
+   return getMapData(this, key).has(key);
+ }
+
+ /**
+ * Sets the map `key` to `value`.
+ *
+ * @private
+ * @name set
+ * @memberOf MapCache
+ * @param {string} key The key of the value to set.
+ * @param {*} value The value to set.
+ * @returns {Object} Returns the map cache instance.
+ */
+ function mapCacheSet(key, value) {
+   var data = getMapData(this, key),
+       size = data.size;
+
+   data.set(key, value);
+   this.size += data.size == size ? 0 : 1;
+   return this;
+ }
+
+ // Add methods to `MapCache`.
+ MapCache.prototype.clear = mapCacheClear;
+ MapCache.prototype['delete'] = mapCacheDelete;
+ MapCache.prototype.get = mapCacheGet;
+ MapCache.prototype.has = mapCacheHas;
+ MapCache.prototype.set = mapCacheSet;
+
+ /*------------------------------------------------------------------------*/
+
+ /**
+ * Creates an array cache object to store unique values.
+ *
+ * @private
+ * @constructor
+ * @param {Array} [values] The values to cache.
+ */
+ function SetCache(values) {
+   var index = -1,
+       length = values == null ? 0 : values.length;
+
+   this.__data__ = new MapCache;
+   while (++index < length) {
+     this.add(values[index]);
+   }
+ }
+
+ /**
+ * Adds `value` to the array cache.
+ *
+ * @private
+ * @name add
+ * @memberOf SetCache
+ * @alias push
+ * @param {*} value The value to cache.
+ * @returns {Object} Returns the cache instance.
+ */
+ function setCacheAdd(value) {
+   this.__data__.set(value, HASH_UNDEFINED);
+   return this;
+ }
+
+ /**
+ * Checks if `value` is in the array cache.
+ *
+ * @private
+ * @name has
+ * @memberOf SetCache
+ * @param {*} value The value to search for.
+ * @returns {boolean} Returns `true` if `value` is found, else `false`.
+ */
+ function setCacheHas(value) {
+   return this.__data__.has(value);
+ }
+
+ // Add methods to `SetCache`.
+ SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; + SetCache.prototype.has = setCacheHas; + + /*------------------------------------------------------------------------*/ + + /** + * Creates a stack cache object to store key-value pairs. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ + function Stack(entries) { + var data = this.__data__ = new ListCache(entries); + this.size = data.size; + } + + /** + * Removes all key-value entries from the stack. + * + * @private + * @name clear + * @memberOf Stack + */ + function stackClear() { + this.__data__ = new ListCache; + this.size = 0; + } + + /** + * Removes `key` and its value from the stack. + * + * @private + * @name delete + * @memberOf Stack + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ + function stackDelete(key) { + var data = this.__data__, + result = data['delete'](key); + + this.size = data.size; + return result; + } + + /** + * Gets the stack value for `key`. + * + * @private + * @name get + * @memberOf Stack + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ + function stackGet(key) { + return this.__data__.get(key); + } + + /** + * Checks if a stack value for `key` exists. + * + * @private + * @name has + * @memberOf Stack + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ + function stackHas(key) { + return this.__data__.has(key); + } + + /** + * Sets the stack `key` to `value`. + * + * @private + * @name set + * @memberOf Stack + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the stack cache instance. + */ + function stackSet(key, value) { + var data = this.__data__; + if (data instanceof ListCache) { + var pairs = data.__data__; + if (!Map || (pairs.length < LARGE_ARRAY_SIZE - 1)) { + pairs.push([key, value]); + this.size = ++data.size; + return this; + } + data = this.__data__ = new MapCache(pairs); + } + data.set(key, value); + this.size = data.size; + return this; + } + + // Add methods to `Stack`. + Stack.prototype.clear = stackClear; + Stack.prototype['delete'] = stackDelete; + Stack.prototype.get = stackGet; + Stack.prototype.has = stackHas; + Stack.prototype.set = stackSet; + + /*------------------------------------------------------------------------*/ + + /** + * Creates an array of the enumerable property names of the array-like `value`. + * + * @private + * @param {*} value The value to query. + * @param {boolean} inherited Specify returning inherited property names. + * @returns {Array} Returns the array of property names. + */ + function arrayLikeKeys(value, inherited) { + var isArr = isArray(value), + isArg = !isArr && isArguments(value), + isBuff = !isArr && !isArg && isBuffer(value), + isType = !isArr && !isArg && !isBuff && isTypedArray(value), + skipIndexes = isArr || isArg || isBuff || isType, + result = skipIndexes ? baseTimes(value.length, String) : [], + length = result.length; + + for (var key in value) { + if ((inherited || hasOwnProperty.call(value, key)) && + !(skipIndexes && ( + // Safari 9 has enumerable `arguments.length` in strict mode. + key == 'length' || + // Node.js 0.10 has enumerable non-index properties on buffers. 
+ (isBuff && (key == 'offset' || key == 'parent')) || + // PhantomJS 2 has enumerable non-index properties on typed arrays. + (isType && (key == 'buffer' || key == 'byteLength' || key == 'byteOffset')) || + // Skip index properties. + isIndex(key, length) + ))) { + result.push(key); + } + } + return result; + } + + /** + * A specialized version of `_.sample` for arrays. + * + * @private + * @param {Array} array The array to sample. + * @returns {*} Returns the random element. + */ + function arraySample(array) { + var length = array.length; + return length ? array[baseRandom(0, length - 1)] : undefined; + } + + /** + * A specialized version of `_.sampleSize` for arrays. + * + * @private + * @param {Array} array The array to sample. + * @param {number} n The number of elements to sample. + * @returns {Array} Returns the random elements. + */ + function arraySampleSize(array, n) { + return shuffleSelf(copyArray(array), baseClamp(n, 0, array.length)); + } + + /** + * A specialized version of `_.shuffle` for arrays. + * + * @private + * @param {Array} array The array to shuffle. + * @returns {Array} Returns the new shuffled array. + */ + function arrayShuffle(array) { + return shuffleSelf(copyArray(array)); + } + + /** + * This function is like `assignValue` except that it doesn't assign + * `undefined` values. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function assignMergeValue(object, key, value) { + if ((value !== undefined && !eq(object[key], value)) || + (value === undefined && !(key in object))) { + baseAssignValue(object, key, value); + } + } + + /** + * Assigns `value` to `key` of `object` if the existing value is not equivalent + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function assignValue(object, key, value) { + var objValue = object[key]; + if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) || + (value === undefined && !(key in object))) { + baseAssignValue(object, key, value); + } + } + + /** + * Gets the index at which the `key` is found in `array` of key-value pairs. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} key The key to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + */ + function assocIndexOf(array, key) { + var length = array.length; + while (length--) { + if (eq(array[length][0], key)) { + return length; + } + } + return -1; + } + + /** + * Aggregates elements of `collection` on `accumulator` with keys transformed + * by `iteratee` and values set by `setter`. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform keys. + * @param {Object} accumulator The initial aggregated object. + * @returns {Function} Returns `accumulator`. 
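+ *
+ * A usage sketch (added for illustration; assumes the surrounding lodash
+ * internals are in scope):
+ * @example
+ *
+ * // Group values by the floor of each number, mirroring `_.groupBy`.
+ * baseAggregator([6.1, 4.2, 6.3], function(acc, value, key) {
+ *   (acc[key] || (acc[key] = [])).push(value);
+ * }, Math.floor, {});
+ * // => { '4': [4.2], '6': [6.1, 6.3] }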
+ */ + function baseAggregator(collection, setter, iteratee, accumulator) { + baseEach(collection, function(value, key, collection) { + setter(accumulator, value, iteratee(value), collection); + }); + return accumulator; + } + + /** + * The base implementation of `_.assign` without support for multiple sources + * or `customizer` functions. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @returns {Object} Returns `object`. + */ + function baseAssign(object, source) { + return object && copyObject(source, keys(source), object); + } + + /** + * The base implementation of `_.assignIn` without support for multiple sources + * or `customizer` functions. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @returns {Object} Returns `object`. + */ + function baseAssignIn(object, source) { + return object && copyObject(source, keysIn(source), object); + } + + /** + * The base implementation of `assignValue` and `assignMergeValue` without + * value checks. + * + * @private + * @param {Object} object The object to modify. + * @param {string} key The key of the property to assign. + * @param {*} value The value to assign. + */ + function baseAssignValue(object, key, value) { + if (key == '__proto__' && defineProperty) { + defineProperty(object, key, { + 'configurable': true, + 'enumerable': true, + 'value': value, + 'writable': true + }); + } else { + object[key] = value; + } + } + + /** + * The base implementation of `_.at` without support for individual paths. + * + * @private + * @param {Object} object The object to iterate over. + * @param {string[]} paths The property paths to pick. + * @returns {Array} Returns the picked elements. + */ + function baseAt(object, paths) { + var index = -1, + length = paths.length, + result = Array(length), + skip = object == null; + + while (++index < length) { + result[index] = skip ? undefined : get(object, paths[index]); + } + return result; + } + + /** + * The base implementation of `_.clamp` which doesn't coerce arguments. + * + * @private + * @param {number} number The number to clamp. + * @param {number} [lower] The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the clamped number. + */ + function baseClamp(number, lower, upper) { + if (number === number) { + if (upper !== undefined) { + number = number <= upper ? number : upper; + } + if (lower !== undefined) { + number = number >= lower ? number : lower; + } + } + return number; + } + + /** + * The base implementation of `_.clone` and `_.cloneDeep` which tracks + * traversed objects. + * + * @private + * @param {*} value The value to clone. + * @param {boolean} bitmask The bitmask flags. + * 1 - Deep clone + * 2 - Flatten inherited properties + * 4 - Clone symbols + * @param {Function} [customizer] The function to customize cloning. + * @param {string} [key] The key of `value`. + * @param {Object} [object] The parent object of `value`. + * @param {Object} [stack] Tracks traversed objects and their clone counterparts. + * @returns {*} Returns the cloned value. + */ + function baseClone(value, bitmask, customizer, key, object, stack) { + var result, + isDeep = bitmask & CLONE_DEEP_FLAG, + isFlat = bitmask & CLONE_FLAT_FLAG, + isFull = bitmask & CLONE_SYMBOLS_FLAG; + + if (customizer) { + result = object ? 
customizer(value, key, object, stack) : customizer(value); + } + if (result !== undefined) { + return result; + } + if (!isObject(value)) { + return value; + } + var isArr = isArray(value); + if (isArr) { + result = initCloneArray(value); + if (!isDeep) { + return copyArray(value, result); + } + } else { + var tag = getTag(value), + isFunc = tag == funcTag || tag == genTag; + + if (isBuffer(value)) { + return cloneBuffer(value, isDeep); + } + if (tag == objectTag || tag == argsTag || (isFunc && !object)) { + result = (isFlat || isFunc) ? {} : initCloneObject(value); + if (!isDeep) { + return isFlat + ? copySymbolsIn(value, baseAssignIn(result, value)) + : copySymbols(value, baseAssign(result, value)); + } + } else { + if (!cloneableTags[tag]) { + return object ? value : {}; + } + result = initCloneByTag(value, tag, isDeep); + } + } + // Check for circular references and return its corresponding clone. + stack || (stack = new Stack); + var stacked = stack.get(value); + if (stacked) { + return stacked; + } + stack.set(value, result); + + if (isSet(value)) { + value.forEach(function(subValue) { + result.add(baseClone(subValue, bitmask, customizer, subValue, value, stack)); + }); + } else if (isMap(value)) { + value.forEach(function(subValue, key) { + result.set(key, baseClone(subValue, bitmask, customizer, key, value, stack)); + }); + } + + var keysFunc = isFull + ? (isFlat ? getAllKeysIn : getAllKeys) + : (isFlat ? keysIn : keys); + + var props = isArr ? undefined : keysFunc(value); + arrayEach(props || value, function(subValue, key) { + if (props) { + key = subValue; + subValue = value[key]; + } + // Recursively populate clone (susceptible to call stack limits). + assignValue(result, key, baseClone(subValue, bitmask, customizer, key, value, stack)); + }); + return result; + } + + /** + * The base implementation of `_.conforms` which doesn't clone `source`. + * + * @private + * @param {Object} source The object of property predicates to conform to. + * @returns {Function} Returns the new spec function. + */ + function baseConforms(source) { + var props = keys(source); + return function(object) { + return baseConformsTo(object, source, props); + }; + } + + /** + * The base implementation of `_.conformsTo` which accepts `props` to check. + * + * @private + * @param {Object} object The object to inspect. + * @param {Object} source The object of property predicates to conform to. + * @returns {boolean} Returns `true` if `object` conforms, else `false`. + */ + function baseConformsTo(object, source, props) { + var length = props.length; + if (object == null) { + return !length; + } + object = Object(object); + while (length--) { + var key = props[length], + predicate = source[key], + value = object[key]; + + if ((value === undefined && !(key in object)) || !predicate(value)) { + return false; + } + } + return true; + } + + /** + * The base implementation of `_.delay` and `_.defer` which accepts `args` + * to provide to `func`. + * + * @private + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @param {Array} args The arguments to provide to `func`. + * @returns {number|Object} Returns the timer id or timeout object. 
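+ *
+ * A usage sketch (added for illustration):
+ * @example
+ *
+ * // Invoke `console.log` with the given arguments after one second.
+ * baseDelay(console.log, 1000, ['deferred']);
+ * // => logs 'deferred' after one second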
+ */
+ function baseDelay(func, wait, args) {
+   if (typeof func != 'function') {
+     throw new TypeError(FUNC_ERROR_TEXT);
+   }
+   return setTimeout(function() { func.apply(undefined, args); }, wait);
+ }
+
+ /**
+ * The base implementation of methods like `_.difference` without support
+ * for excluding multiple arrays or iteratee shorthands.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {Array} values The values to exclude.
+ * @param {Function} [iteratee] The iteratee invoked per element.
+ * @param {Function} [comparator] The comparator invoked per element.
+ * @returns {Array} Returns the new array of filtered values.
+ */
+ function baseDifference(array, values, iteratee, comparator) {
+   var index = -1,
+       includes = arrayIncludes,
+       isCommon = true,
+       length = array.length,
+       result = [],
+       valuesLength = values.length;
+
+   if (!length) {
+     return result;
+   }
+   if (iteratee) {
+     values = arrayMap(values, baseUnary(iteratee));
+   }
+   if (comparator) {
+     includes = arrayIncludesWith;
+     isCommon = false;
+   }
+   else if (values.length >= LARGE_ARRAY_SIZE) {
+     includes = cacheHas;
+     isCommon = false;
+     values = new SetCache(values);
+   }
+   outer:
+   while (++index < length) {
+     var value = array[index],
+         computed = iteratee == null ? value : iteratee(value);
+
+     value = (comparator || value !== 0) ? value : 0;
+     if (isCommon && computed === computed) {
+       var valuesIndex = valuesLength;
+       while (valuesIndex--) {
+         if (values[valuesIndex] === computed) {
+           continue outer;
+         }
+       }
+       result.push(value);
+     }
+     else if (!includes(values, computed, comparator)) {
+       result.push(value);
+     }
+   }
+   return result;
+ }
+
+ /**
+ * The base implementation of `_.forEach` without support for iteratee shorthands.
+ *
+ * @private
+ * @param {Array|Object} collection The collection to iterate over.
+ * @param {Function} iteratee The function invoked per iteration.
+ * @returns {Array|Object} Returns `collection`.
+ */
+ var baseEach = createBaseEach(baseForOwn);
+
+ /**
+ * The base implementation of `_.forEachRight` without support for iteratee shorthands.
+ *
+ * @private
+ * @param {Array|Object} collection The collection to iterate over.
+ * @param {Function} iteratee The function invoked per iteration.
+ * @returns {Array|Object} Returns `collection`.
+ */
+ var baseEachRight = createBaseEach(baseForOwnRight, true);
+
+ /**
+ * The base implementation of `_.every` without support for iteratee shorthands.
+ *
+ * @private
+ * @param {Array|Object} collection The collection to iterate over.
+ * @param {Function} predicate The function invoked per iteration.
+ * @returns {boolean} Returns `true` if all elements pass the predicate check,
+ * else `false`.
+ */
+ function baseEvery(collection, predicate) {
+   var result = true;
+   baseEach(collection, function(value, index, collection) {
+     result = !!predicate(value, index, collection);
+     return result;
+   });
+   return result;
+ }
+
+ /**
+ * The base implementation of methods like `_.max` and `_.min` which accepts a
+ * `comparator` to determine the extremum value.
+ *
+ * @private
+ * @param {Array} array The array to iterate over.
+ * @param {Function} iteratee The iteratee invoked per iteration.
+ * @param {Function} comparator The comparator used to compare values.
+ * @returns {*} Returns the extremum value.
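+ *
+ * A usage sketch (added for illustration; `identity` and the `baseGt`
+ * comparator are defined elsewhere in this file):
+ * @example
+ *
+ * // Find the maximum value, mirroring `_.max`.
+ * baseExtremum([3, 1, 2], identity, baseGt);
+ * // => 3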
+ */ + function baseExtremum(array, iteratee, comparator) { + var index = -1, + length = array.length; + + while (++index < length) { + var value = array[index], + current = iteratee(value); + + if (current != null && (computed === undefined + ? (current === current && !isSymbol(current)) + : comparator(current, computed) + )) { + var computed = current, + result = value; + } + } + return result; + } + + /** + * The base implementation of `_.fill` without an iteratee call guard. + * + * @private + * @param {Array} array The array to fill. + * @param {*} value The value to fill `array` with. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns `array`. + */ + function baseFill(array, value, start, end) { + var length = array.length; + + start = toInteger(start); + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = (end === undefined || end > length) ? length : toInteger(end); + if (end < 0) { + end += length; + } + end = start > end ? 0 : toLength(end); + while (start < end) { + array[start++] = value; + } + return array; + } + + /** + * The base implementation of `_.filter` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + */ + function baseFilter(collection, predicate) { + var result = []; + baseEach(collection, function(value, index, collection) { + if (predicate(value, index, collection)) { + result.push(value); + } + }); + return result; + } + + /** + * The base implementation of `_.flatten` with support for restricting flattening. + * + * @private + * @param {Array} array The array to flatten. + * @param {number} depth The maximum recursion depth. + * @param {boolean} [predicate=isFlattenable] The function invoked per iteration. + * @param {boolean} [isStrict] Restrict to values that pass `predicate` checks. + * @param {Array} [result=[]] The initial result value. + * @returns {Array} Returns the new flattened array. + */ + function baseFlatten(array, depth, predicate, isStrict, result) { + var index = -1, + length = array.length; + + predicate || (predicate = isFlattenable); + result || (result = []); + + while (++index < length) { + var value = array[index]; + if (depth > 0 && predicate(value)) { + if (depth > 1) { + // Recursively flatten arrays (susceptible to call stack limits). + baseFlatten(value, depth - 1, predicate, isStrict, result); + } else { + arrayPush(result, value); + } + } else if (!isStrict) { + result[result.length] = value; + } + } + return result; + } + + /** + * The base implementation of `baseForOwn` which iterates over `object` + * properties returned by `keysFunc` and invokes `iteratee` for each property. + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @param {Function} keysFunc The function to get the keys of `object`. + * @returns {Object} Returns `object`. + */ + var baseFor = createBaseFor(); + + /** + * This function is like `baseFor` except that it iterates over properties + * in the opposite order. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. 
+ * @param {Function} keysFunc The function to get the keys of `object`. + * @returns {Object} Returns `object`. + */ + var baseForRight = createBaseFor(true); + + /** + * The base implementation of `_.forOwn` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Object} Returns `object`. + */ + function baseForOwn(object, iteratee) { + return object && baseFor(object, iteratee, keys); + } + + /** + * The base implementation of `_.forOwnRight` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Object} Returns `object`. + */ + function baseForOwnRight(object, iteratee) { + return object && baseForRight(object, iteratee, keys); + } + + /** + * The base implementation of `_.functions` which creates an array of + * `object` function property names filtered from `props`. + * + * @private + * @param {Object} object The object to inspect. + * @param {Array} props The property names to filter. + * @returns {Array} Returns the function names. + */ + function baseFunctions(object, props) { + return arrayFilter(props, function(key) { + return isFunction(object[key]); + }); + } + + /** + * The base implementation of `_.get` without support for default values. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to get. + * @returns {*} Returns the resolved value. + */ + function baseGet(object, path) { + path = castPath(path, object); + + var index = 0, + length = path.length; + + while (object != null && index < length) { + object = object[toKey(path[index++])]; + } + return (index && index == length) ? object : undefined; + } + + /** + * The base implementation of `getAllKeys` and `getAllKeysIn` which uses + * `keysFunc` and `symbolsFunc` to get the enumerable property names and + * symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Function} keysFunc The function to get the keys of `object`. + * @param {Function} symbolsFunc The function to get the symbols of `object`. + * @returns {Array} Returns the array of property names and symbols. + */ + function baseGetAllKeys(object, keysFunc, symbolsFunc) { + var result = keysFunc(object); + return isArray(object) ? result : arrayPush(result, symbolsFunc(object)); + } + + /** + * The base implementation of `getTag` without fallbacks for buggy environments. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ + function baseGetTag(value) { + if (value == null) { + return value === undefined ? undefinedTag : nullTag; + } + return (symToStringTag && symToStringTag in Object(value)) + ? getRawTag(value) + : objectToString(value); + } + + /** + * The base implementation of `_.gt` which doesn't coerce arguments. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than `other`, + * else `false`. + */ + function baseGt(value, other) { + return value > other; + } + + /** + * The base implementation of `_.has` without support for deep paths. + * + * @private + * @param {Object} [object] The object to query. + * @param {Array|string} key The key to check. 
+ * @returns {boolean} Returns `true` if `key` exists, else `false`. + */ + function baseHas(object, key) { + return object != null && hasOwnProperty.call(object, key); + } + + /** + * The base implementation of `_.hasIn` without support for deep paths. + * + * @private + * @param {Object} [object] The object to query. + * @param {Array|string} key The key to check. + * @returns {boolean} Returns `true` if `key` exists, else `false`. + */ + function baseHasIn(object, key) { + return object != null && key in Object(object); + } + + /** + * The base implementation of `_.inRange` which doesn't coerce arguments. + * + * @private + * @param {number} number The number to check. + * @param {number} start The start of the range. + * @param {number} end The end of the range. + * @returns {boolean} Returns `true` if `number` is in the range, else `false`. + */ + function baseInRange(number, start, end) { + return number >= nativeMin(start, end) && number < nativeMax(start, end); + } + + /** + * The base implementation of methods like `_.intersection`, without support + * for iteratee shorthands, that accepts an array of arrays to inspect. + * + * @private + * @param {Array} arrays The arrays to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of shared values. + */ + function baseIntersection(arrays, iteratee, comparator) { + var includes = comparator ? arrayIncludesWith : arrayIncludes, + length = arrays[0].length, + othLength = arrays.length, + othIndex = othLength, + caches = Array(othLength), + maxLength = Infinity, + result = []; + + while (othIndex--) { + var array = arrays[othIndex]; + if (othIndex && iteratee) { + array = arrayMap(array, baseUnary(iteratee)); + } + maxLength = nativeMin(array.length, maxLength); + caches[othIndex] = !comparator && (iteratee || (length >= 120 && array.length >= 120)) + ? new SetCache(othIndex && array) + : undefined; + } + array = arrays[0]; + + var index = -1, + seen = caches[0]; + + outer: + while (++index < length && result.length < maxLength) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + value = (comparator || value !== 0) ? value : 0; + if (!(seen + ? cacheHas(seen, computed) + : includes(result, computed, comparator) + )) { + othIndex = othLength; + while (--othIndex) { + var cache = caches[othIndex]; + if (!(cache + ? cacheHas(cache, computed) + : includes(arrays[othIndex], computed, comparator)) + ) { + continue outer; + } + } + if (seen) { + seen.push(computed); + } + result.push(value); + } + } + return result; + } + + /** + * The base implementation of `_.invert` and `_.invertBy` which inverts + * `object` with values transformed by `iteratee` and set by `setter`. + * + * @private + * @param {Object} object The object to iterate over. + * @param {Function} setter The function to set `accumulator` values. + * @param {Function} iteratee The iteratee to transform values. + * @param {Object} accumulator The initial inverted object. + * @returns {Function} Returns `accumulator`. + */ + function baseInverter(object, setter, iteratee, accumulator) { + baseForOwn(object, function(value, key, object) { + setter(accumulator, iteratee(value), key, object); + }); + return accumulator; + } + + /** + * The base implementation of `_.invoke` without support for individual + * method arguments. + * + * @private + * @param {Object} object The object to query. 
+ * @param {Array|string} path The path of the method to invoke.
+ * @param {Array} args The arguments to invoke the method with.
+ * @returns {*} Returns the result of the invoked method.
+ */
+ function baseInvoke(object, path, args) {
+   path = castPath(path, object);
+   object = parent(object, path);
+   var func = object == null ? object : object[toKey(last(path))];
+   return func == null ? undefined : apply(func, object, args);
+ }
+
+ /**
+ * The base implementation of `_.isArguments`.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an `arguments` object, else `false`.
+ */
+ function baseIsArguments(value) {
+   return isObjectLike(value) && baseGetTag(value) == argsTag;
+ }
+
+ /**
+ * The base implementation of `_.isArrayBuffer` without Node.js optimizations.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an array buffer, else `false`.
+ */
+ function baseIsArrayBuffer(value) {
+   return isObjectLike(value) && baseGetTag(value) == arrayBufferTag;
+ }
+
+ /**
+ * The base implementation of `_.isDate` without Node.js optimizations.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a date object, else `false`.
+ */
+ function baseIsDate(value) {
+   return isObjectLike(value) && baseGetTag(value) == dateTag;
+ }
+
+ /**
+ * The base implementation of `_.isEqual` which supports partial comparisons
+ * and tracks traversed objects.
+ *
+ * @private
+ * @param {*} value The value to compare.
+ * @param {*} other The other value to compare.
+ * @param {boolean} bitmask The bitmask flags.
+ *  1 - Unordered comparison
+ *  2 - Partial comparison
+ * @param {Function} [customizer] The function to customize comparisons.
+ * @param {Object} [stack] Tracks traversed `value` and `other` objects.
+ * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
+ */
+ function baseIsEqual(value, other, bitmask, customizer, stack) {
+   if (value === other) {
+     return true;
+   }
+   if (value == null || other == null || (!isObjectLike(value) && !isObjectLike(other))) {
+     return value !== value && other !== other;
+   }
+   return baseIsEqualDeep(value, other, bitmask, customizer, baseIsEqual, stack);
+ }
+
+ /**
+ * A specialized version of `baseIsEqual` for arrays and objects which performs
+ * deep comparisons and tracks traversed objects enabling objects with circular
+ * references to be compared.
+ *
+ * @private
+ * @param {Object} object The object to compare.
+ * @param {Object} other The other object to compare.
+ * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details.
+ * @param {Function} customizer The function to customize comparisons.
+ * @param {Function} equalFunc The function to determine equivalents of values.
+ * @param {Object} [stack] Tracks traversed `object` and `other` objects.
+ * @returns {boolean} Returns `true` if the objects are equivalent, else `false`.
+ */
+ function baseIsEqualDeep(object, other, bitmask, customizer, equalFunc, stack) {
+   var objIsArr = isArray(object),
+       othIsArr = isArray(other),
+       objTag = objIsArr ? arrayTag : getTag(object),
+       othTag = othIsArr ? arrayTag : getTag(other);
+
+   objTag = objTag == argsTag ? objectTag : objTag;
+   othTag = othTag == argsTag ?
objectTag : othTag; + + var objIsObj = objTag == objectTag, + othIsObj = othTag == objectTag, + isSameTag = objTag == othTag; + + if (isSameTag && isBuffer(object)) { + if (!isBuffer(other)) { + return false; + } + objIsArr = true; + objIsObj = false; + } + if (isSameTag && !objIsObj) { + stack || (stack = new Stack); + return (objIsArr || isTypedArray(object)) + ? equalArrays(object, other, bitmask, customizer, equalFunc, stack) + : equalByTag(object, other, objTag, bitmask, customizer, equalFunc, stack); + } + if (!(bitmask & COMPARE_PARTIAL_FLAG)) { + var objIsWrapped = objIsObj && hasOwnProperty.call(object, '__wrapped__'), + othIsWrapped = othIsObj && hasOwnProperty.call(other, '__wrapped__'); + + if (objIsWrapped || othIsWrapped) { + var objUnwrapped = objIsWrapped ? object.value() : object, + othUnwrapped = othIsWrapped ? other.value() : other; + + stack || (stack = new Stack); + return equalFunc(objUnwrapped, othUnwrapped, bitmask, customizer, stack); + } + } + if (!isSameTag) { + return false; + } + stack || (stack = new Stack); + return equalObjects(object, other, bitmask, customizer, equalFunc, stack); + } + + /** + * The base implementation of `_.isMap` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a map, else `false`. + */ + function baseIsMap(value) { + return isObjectLike(value) && getTag(value) == mapTag; + } + + /** + * The base implementation of `_.isMatch` without support for iteratee shorthands. + * + * @private + * @param {Object} object The object to inspect. + * @param {Object} source The object of property values to match. + * @param {Array} matchData The property names, values, and compare flags to match. + * @param {Function} [customizer] The function to customize comparisons. + * @returns {boolean} Returns `true` if `object` is a match, else `false`. + */ + function baseIsMatch(object, source, matchData, customizer) { + var index = matchData.length, + length = index, + noCustomizer = !customizer; + + if (object == null) { + return !length; + } + object = Object(object); + while (index--) { + var data = matchData[index]; + if ((noCustomizer && data[2]) + ? data[1] !== object[data[0]] + : !(data[0] in object) + ) { + return false; + } + } + while (++index < length) { + data = matchData[index]; + var key = data[0], + objValue = object[key], + srcValue = data[1]; + + if (noCustomizer && data[2]) { + if (objValue === undefined && !(key in object)) { + return false; + } + } else { + var stack = new Stack; + if (customizer) { + var result = customizer(objValue, srcValue, key, object, source, stack); + } + if (!(result === undefined + ? baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG, customizer, stack) + : result + )) { + return false; + } + } + } + return true; + } + + /** + * The base implementation of `_.isNative` without bad shim checks. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. + */ + function baseIsNative(value) { + if (!isObject(value) || isMasked(value)) { + return false; + } + var pattern = isFunction(value) ? reIsNative : reIsHostCtor; + return pattern.test(toSource(value)); + } + + /** + * The base implementation of `_.isRegExp` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a regexp, else `false`. 
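+ *
+ * A usage sketch (added for illustration):
+ * @example
+ *
+ * baseIsRegExp(/abc/);
+ * // => true
+ *
+ * baseIsRegExp('/abc/');
+ * // => false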
+ */ + function baseIsRegExp(value) { + return isObjectLike(value) && baseGetTag(value) == regexpTag; + } + + /** + * The base implementation of `_.isSet` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a set, else `false`. + */ + function baseIsSet(value) { + return isObjectLike(value) && getTag(value) == setTag; + } + + /** + * The base implementation of `_.isTypedArray` without Node.js optimizations. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + */ + function baseIsTypedArray(value) { + return isObjectLike(value) && + isLength(value.length) && !!typedArrayTags[baseGetTag(value)]; + } + + /** + * The base implementation of `_.iteratee`. + * + * @private + * @param {*} [value=_.identity] The value to convert to an iteratee. + * @returns {Function} Returns the iteratee. + */ + function baseIteratee(value) { + // Don't store the `typeof` result in a variable to avoid a JIT bug in Safari 9. + // See https://bugs.webkit.org/show_bug.cgi?id=156034 for more details. + if (typeof value == 'function') { + return value; + } + if (value == null) { + return identity; + } + if (typeof value == 'object') { + return isArray(value) + ? baseMatchesProperty(value[0], value[1]) + : baseMatches(value); + } + return property(value); + } + + /** + * The base implementation of `_.keys` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ + function baseKeys(object) { + if (!isPrototype(object)) { + return nativeKeys(object); + } + var result = []; + for (var key in Object(object)) { + if (hasOwnProperty.call(object, key) && key != 'constructor') { + result.push(key); + } + } + return result; + } + + /** + * The base implementation of `_.keysIn` which doesn't treat sparse arrays as dense. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ + function baseKeysIn(object) { + if (!isObject(object)) { + return nativeKeysIn(object); + } + var isProto = isPrototype(object), + result = []; + + for (var key in object) { + if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) { + result.push(key); + } + } + return result; + } + + /** + * The base implementation of `_.lt` which doesn't coerce arguments. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than `other`, + * else `false`. + */ + function baseLt(value, other) { + return value < other; + } + + /** + * The base implementation of `_.map` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} iteratee The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + */ + function baseMap(collection, iteratee) { + var index = -1, + result = isArrayLike(collection) ? Array(collection.length) : []; + + baseEach(collection, function(value, key, collection) { + result[++index] = iteratee(value, key, collection); + }); + return result; + } + + /** + * The base implementation of `_.matches` which doesn't clone `source`. + * + * @private + * @param {Object} source The object of property values to match. 
+ * @returns {Function} Returns the new spec function. + */ + function baseMatches(source) { + var matchData = getMatchData(source); + if (matchData.length == 1 && matchData[0][2]) { + return matchesStrictComparable(matchData[0][0], matchData[0][1]); + } + return function(object) { + return object === source || baseIsMatch(object, source, matchData); + }; + } + + /** + * The base implementation of `_.matchesProperty` which doesn't clone `srcValue`. + * + * @private + * @param {string} path The path of the property to get. + * @param {*} srcValue The value to match. + * @returns {Function} Returns the new spec function. + */ + function baseMatchesProperty(path, srcValue) { + if (isKey(path) && isStrictComparable(srcValue)) { + return matchesStrictComparable(toKey(path), srcValue); + } + return function(object) { + var objValue = get(object, path); + return (objValue === undefined && objValue === srcValue) + ? hasIn(object, path) + : baseIsEqual(srcValue, objValue, COMPARE_PARTIAL_FLAG | COMPARE_UNORDERED_FLAG); + }; + } + + /** + * The base implementation of `_.merge` without support for multiple sources. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {number} srcIndex The index of `source`. + * @param {Function} [customizer] The function to customize merged values. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + */ + function baseMerge(object, source, srcIndex, customizer, stack) { + if (object === source) { + return; + } + baseFor(source, function(srcValue, key) { + stack || (stack = new Stack); + if (isObject(srcValue)) { + baseMergeDeep(object, source, key, srcIndex, baseMerge, customizer, stack); + } + else { + var newValue = customizer + ? customizer(safeGet(object, key), srcValue, (key + ''), object, source, stack) + : undefined; + + if (newValue === undefined) { + newValue = srcValue; + } + assignMergeValue(object, key, newValue); + } + }, keysIn); + } + + /** + * A specialized version of `baseMerge` for arrays and objects which performs + * deep merges and tracks traversed objects enabling objects with circular + * references to be merged. + * + * @private + * @param {Object} object The destination object. + * @param {Object} source The source object. + * @param {string} key The key of the value to merge. + * @param {number} srcIndex The index of `source`. + * @param {Function} mergeFunc The function to merge values. + * @param {Function} [customizer] The function to customize assigned values. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + */ + function baseMergeDeep(object, source, key, srcIndex, mergeFunc, customizer, stack) { + var objValue = safeGet(object, key), + srcValue = safeGet(source, key), + stacked = stack.get(srcValue); + + if (stacked) { + assignMergeValue(object, key, stacked); + return; + } + var newValue = customizer + ? 
customizer(objValue, srcValue, (key + ''), object, source, stack) + : undefined; + + var isCommon = newValue === undefined; + + if (isCommon) { + var isArr = isArray(srcValue), + isBuff = !isArr && isBuffer(srcValue), + isTyped = !isArr && !isBuff && isTypedArray(srcValue); + + newValue = srcValue; + if (isArr || isBuff || isTyped) { + if (isArray(objValue)) { + newValue = objValue; + } + else if (isArrayLikeObject(objValue)) { + newValue = copyArray(objValue); + } + else if (isBuff) { + isCommon = false; + newValue = cloneBuffer(srcValue, true); + } + else if (isTyped) { + isCommon = false; + newValue = cloneTypedArray(srcValue, true); + } + else { + newValue = []; + } + } + else if (isPlainObject(srcValue) || isArguments(srcValue)) { + newValue = objValue; + if (isArguments(objValue)) { + newValue = toPlainObject(objValue); + } + else if (!isObject(objValue) || isFunction(objValue)) { + newValue = initCloneObject(srcValue); + } + } + else { + isCommon = false; + } + } + if (isCommon) { + // Recursively merge objects and arrays (susceptible to call stack limits). + stack.set(srcValue, newValue); + mergeFunc(newValue, srcValue, srcIndex, customizer, stack); + stack['delete'](srcValue); + } + assignMergeValue(object, key, newValue); + } + + /** + * The base implementation of `_.nth` which doesn't coerce arguments. + * + * @private + * @param {Array} array The array to query. + * @param {number} n The index of the element to return. + * @returns {*} Returns the nth element of `array`. + */ + function baseNth(array, n) { + var length = array.length; + if (!length) { + return; + } + n += n < 0 ? length : 0; + return isIndex(n, length) ? array[n] : undefined; + } + + /** + * The base implementation of `_.orderBy` without param guards. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function[]|Object[]|string[]} iteratees The iteratees to sort by. + * @param {string[]} orders The sort orders of `iteratees`. + * @returns {Array} Returns the new sorted array. + */ + function baseOrderBy(collection, iteratees, orders) { + if (iteratees.length) { + iteratees = arrayMap(iteratees, function(iteratee) { + if (isArray(iteratee)) { + return function(value) { + return baseGet(value, iteratee.length === 1 ? iteratee[0] : iteratee); + } + } + return iteratee; + }); + } else { + iteratees = [identity]; + } + + var index = -1; + iteratees = arrayMap(iteratees, baseUnary(getIteratee())); + + var result = baseMap(collection, function(value, key, collection) { + var criteria = arrayMap(iteratees, function(iteratee) { + return iteratee(value); + }); + return { 'criteria': criteria, 'index': ++index, 'value': value }; + }); + + return baseSortBy(result, function(object, other) { + return compareMultiple(object, other, orders); + }); + } + + /** + * The base implementation of `_.pick` without support for individual + * property identifiers. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. + * @returns {Object} Returns the new object. + */ + function basePick(object, paths) { + return basePickBy(object, paths, function(value, path) { + return hasIn(object, path); + }); + } + + /** + * The base implementation of `_.pickBy` without support for iteratee shorthands. + * + * @private + * @param {Object} object The source object. + * @param {string[]} paths The property paths to pick. + * @param {Function} predicate The function invoked per property. + * @returns {Object} Returns the new object. 
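+ *
+ * A usage sketch (added for illustration; uses the library's own `isNumber`):
+ * @example
+ *
+ * var object = { 'a': 1, 'b': '2', 'c': 3 };
+ * basePickBy(object, ['a', 'b', 'c'], function(value) { return isNumber(value); });
+ * // => { 'a': 1, 'c': 3 }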
+ */ + function basePickBy(object, paths, predicate) { + var index = -1, + length = paths.length, + result = {}; + + while (++index < length) { + var path = paths[index], + value = baseGet(object, path); + + if (predicate(value, path)) { + baseSet(result, castPath(path, object), value); + } + } + return result; + } + + /** + * A specialized version of `baseProperty` which supports deep paths. + * + * @private + * @param {Array|string} path The path of the property to get. + * @returns {Function} Returns the new accessor function. + */ + function basePropertyDeep(path) { + return function(object) { + return baseGet(object, path); + }; + } + + /** + * The base implementation of `_.pullAllBy` without support for iteratee + * shorthands. + * + * @private + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns `array`. + */ + function basePullAll(array, values, iteratee, comparator) { + var indexOf = comparator ? baseIndexOfWith : baseIndexOf, + index = -1, + length = values.length, + seen = array; + + if (array === values) { + values = copyArray(values); + } + if (iteratee) { + seen = arrayMap(array, baseUnary(iteratee)); + } + while (++index < length) { + var fromIndex = 0, + value = values[index], + computed = iteratee ? iteratee(value) : value; + + while ((fromIndex = indexOf(seen, computed, fromIndex, comparator)) > -1) { + if (seen !== array) { + splice.call(seen, fromIndex, 1); + } + splice.call(array, fromIndex, 1); + } + } + return array; + } + + /** + * The base implementation of `_.pullAt` without support for individual + * indexes or capturing the removed elements. + * + * @private + * @param {Array} array The array to modify. + * @param {number[]} indexes The indexes of elements to remove. + * @returns {Array} Returns `array`. + */ + function basePullAt(array, indexes) { + var length = array ? indexes.length : 0, + lastIndex = length - 1; + + while (length--) { + var index = indexes[length]; + if (length == lastIndex || index !== previous) { + var previous = index; + if (isIndex(index)) { + splice.call(array, index, 1); + } else { + baseUnset(array, index); + } + } + } + return array; + } + + /** + * The base implementation of `_.random` without support for returning + * floating-point numbers. + * + * @private + * @param {number} lower The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the random number. + */ + function baseRandom(lower, upper) { + return lower + nativeFloor(nativeRandom() * (upper - lower + 1)); + } + + /** + * The base implementation of `_.range` and `_.rangeRight` which doesn't + * coerce arguments. + * + * @private + * @param {number} start The start of the range. + * @param {number} end The end of the range. + * @param {number} step The value to increment or decrement by. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Array} Returns the range of numbers. + */ + function baseRange(start, end, step, fromRight) { + var index = -1, + length = nativeMax(nativeCeil((end - start) / (step || 1)), 0), + result = Array(length); + + while (length--) { + result[fromRight ? length : ++index] = start; + start += step; + } + return result; + } + + /** + * The base implementation of `_.repeat` which doesn't coerce arguments. + * + * @private + * @param {string} string The string to repeat. 
+ * @param {number} n The number of times to repeat the string. + * @returns {string} Returns the repeated string. + */ + function baseRepeat(string, n) { + var result = ''; + if (!string || n < 1 || n > MAX_SAFE_INTEGER) { + return result; + } + // Leverage the exponentiation by squaring algorithm for a faster repeat. + // See https://en.wikipedia.org/wiki/Exponentiation_by_squaring for more details. + do { + if (n % 2) { + result += string; + } + n = nativeFloor(n / 2); + if (n) { + string += string; + } + } while (n); + + return result; + } + + /** + * The base implementation of `_.rest` which doesn't validate or coerce arguments. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @returns {Function} Returns the new function. + */ + function baseRest(func, start) { + return setToString(overRest(func, start, identity), func + ''); + } + + /** + * The base implementation of `_.sample`. + * + * @private + * @param {Array|Object} collection The collection to sample. + * @returns {*} Returns the random element. + */ + function baseSample(collection) { + return arraySample(values(collection)); + } + + /** + * The base implementation of `_.sampleSize` without param guards. + * + * @private + * @param {Array|Object} collection The collection to sample. + * @param {number} n The number of elements to sample. + * @returns {Array} Returns the random elements. + */ + function baseSampleSize(collection, n) { + var array = values(collection); + return shuffleSelf(array, baseClamp(n, 0, array.length)); + } + + /** + * The base implementation of `_.set`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @param {Function} [customizer] The function to customize path creation. + * @returns {Object} Returns `object`. + */ + function baseSet(object, path, value, customizer) { + if (!isObject(object)) { + return object; + } + path = castPath(path, object); + + var index = -1, + length = path.length, + lastIndex = length - 1, + nested = object; + + while (nested != null && ++index < length) { + var key = toKey(path[index]), + newValue = value; + + if (key === '__proto__' || key === 'constructor' || key === 'prototype') { + return object; + } + + if (index != lastIndex) { + var objValue = nested[key]; + newValue = customizer ? customizer(objValue, key, nested) : undefined; + if (newValue === undefined) { + newValue = isObject(objValue) + ? objValue + : (isIndex(path[index + 1]) ? [] : {}); + } + } + assignValue(nested, key, newValue); + nested = nested[key]; + } + return object; + } + + /** + * The base implementation of `setData` without support for hot loop shorting. + * + * @private + * @param {Function} func The function to associate metadata with. + * @param {*} data The metadata. + * @returns {Function} Returns `func`. + */ + var baseSetData = !metaMap ? identity : function(func, data) { + metaMap.set(func, data); + return func; + }; + + /** + * The base implementation of `setToString` without support for hot loop shorting. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. + */ + var baseSetToString = !defineProperty ? 
identity : function(func, string) { + return defineProperty(func, 'toString', { + 'configurable': true, + 'enumerable': false, + 'value': constant(string), + 'writable': true + }); + }; + + /** + * The base implementation of `_.shuffle`. + * + * @private + * @param {Array|Object} collection The collection to shuffle. + * @returns {Array} Returns the new shuffled array. + */ + function baseShuffle(collection) { + return shuffleSelf(values(collection)); + } + + /** + * The base implementation of `_.slice` without an iteratee call guard. + * + * @private + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ + function baseSlice(array, start, end) { + var index = -1, + length = array.length; + + if (start < 0) { + start = -start > length ? 0 : (length + start); + } + end = end > length ? length : end; + if (end < 0) { + end += length; + } + length = start > end ? 0 : ((end - start) >>> 0); + start >>>= 0; + + var result = Array(length); + while (++index < length) { + result[index] = array[index + start]; + } + return result; + } + + /** + * The base implementation of `_.some` without support for iteratee shorthands. + * + * @private + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} predicate The function invoked per iteration. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + */ + function baseSome(collection, predicate) { + var result; + + baseEach(collection, function(value, index, collection) { + result = predicate(value, index, collection); + return !result; + }); + return !!result; + } + + /** + * The base implementation of `_.sortedIndex` and `_.sortedLastIndex` which + * performs a binary search of `array` to determine the index at which `value` + * should be inserted into `array` in order to maintain its sort order. + * + * @private + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {boolean} [retHighest] Specify returning the highest qualified index. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + */ + function baseSortedIndex(array, value, retHighest) { + var low = 0, + high = array == null ? low : array.length; + + if (typeof value == 'number' && value === value && high <= HALF_MAX_ARRAY_LENGTH) { + while (low < high) { + var mid = (low + high) >>> 1, + computed = array[mid]; + + if (computed !== null && !isSymbol(computed) && + (retHighest ? (computed <= value) : (computed < value))) { + low = mid + 1; + } else { + high = mid; + } + } + return high; + } + return baseSortedIndexBy(array, value, identity, retHighest); + } + + /** + * The base implementation of `_.sortedIndexBy` and `_.sortedLastIndexBy` + * which invokes `iteratee` for `value` and each element of `array` to compute + * their sort ranking. The iteratee is invoked with one argument; (value). + * + * @private + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} iteratee The iteratee invoked per element. + * @param {boolean} [retHighest] Specify returning the highest qualified index. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + */ + function baseSortedIndexBy(array, value, iteratee, retHighest) { + var low = 0, + high = array == null ? 
0 : array.length; + if (high === 0) { + return 0; + } + + value = iteratee(value); + var valIsNaN = value !== value, + valIsNull = value === null, + valIsSymbol = isSymbol(value), + valIsUndefined = value === undefined; + + while (low < high) { + var mid = nativeFloor((low + high) / 2), + computed = iteratee(array[mid]), + othIsDefined = computed !== undefined, + othIsNull = computed === null, + othIsReflexive = computed === computed, + othIsSymbol = isSymbol(computed); + + if (valIsNaN) { + var setLow = retHighest || othIsReflexive; + } else if (valIsUndefined) { + setLow = othIsReflexive && (retHighest || othIsDefined); + } else if (valIsNull) { + setLow = othIsReflexive && othIsDefined && (retHighest || !othIsNull); + } else if (valIsSymbol) { + setLow = othIsReflexive && othIsDefined && !othIsNull && (retHighest || !othIsSymbol); + } else if (othIsNull || othIsSymbol) { + setLow = false; + } else { + setLow = retHighest ? (computed <= value) : (computed < value); + } + if (setLow) { + low = mid + 1; + } else { + high = mid; + } + } + return nativeMin(high, MAX_ARRAY_INDEX); + } + + /** + * The base implementation of `_.sortedUniq` and `_.sortedUniqBy` without + * support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. + */ + function baseSortedUniq(array, iteratee) { + var index = -1, + length = array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + if (!index || !eq(computed, seen)) { + var seen = computed; + result[resIndex++] = value === 0 ? 0 : value; + } + } + return result; + } + + /** + * The base implementation of `_.toNumber` which doesn't ensure correct + * conversions of binary, hexadecimal, or octal string values. + * + * @private + * @param {*} value The value to process. + * @returns {number} Returns the number. + */ + function baseToNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + return +value; + } + + /** + * The base implementation of `_.toString` which doesn't convert nullish + * values to empty strings. + * + * @private + * @param {*} value The value to process. + * @returns {string} Returns the string. + */ + function baseToString(value) { + // Exit early for strings to avoid a performance hit in some environments. + if (typeof value == 'string') { + return value; + } + if (isArray(value)) { + // Recursively convert values (susceptible to call stack limits). + return arrayMap(value, baseToString) + ''; + } + if (isSymbol(value)) { + return symbolToString ? symbolToString.call(value) : ''; + } + var result = (value + ''); + return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result; + } + + /** + * The base implementation of `_.uniqBy` without support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new duplicate free array. 
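+ * @example
+ *
+ * // A rough usage sketch; this private helper backs `_.uniq` and `_.uniqBy`.
+ * baseUniq([2, 1, 2]);
+ * // => [2, 1]
+ *
+ * baseUniq([2.1, 1.2, 2.3], Math.floor);
+ * // => [2.1, 1.2]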
+ */ + function baseUniq(array, iteratee, comparator) { + var index = -1, + includes = arrayIncludes, + length = array.length, + isCommon = true, + result = [], + seen = result; + + if (comparator) { + isCommon = false; + includes = arrayIncludesWith; + } + else if (length >= LARGE_ARRAY_SIZE) { + var set = iteratee ? null : createSet(array); + if (set) { + return setToArray(set); + } + isCommon = false; + includes = cacheHas; + seen = new SetCache; + } + else { + seen = iteratee ? [] : result; + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + value = (comparator || value !== 0) ? value : 0; + if (isCommon && computed === computed) { + var seenIndex = seen.length; + while (seenIndex--) { + if (seen[seenIndex] === computed) { + continue outer; + } + } + if (iteratee) { + seen.push(computed); + } + result.push(value); + } + else if (!includes(seen, computed, comparator)) { + if (seen !== result) { + seen.push(computed); + } + result.push(value); + } + } + return result; + } + + /** + * The base implementation of `_.unset`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The property path to unset. + * @returns {boolean} Returns `true` if the property is deleted, else `false`. + */ + function baseUnset(object, path) { + path = castPath(path, object); + object = parent(object, path); + return object == null || delete object[toKey(last(path))]; + } + + /** + * The base implementation of `_.update`. + * + * @private + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to update. + * @param {Function} updater The function to produce the updated value. + * @param {Function} [customizer] The function to customize path creation. + * @returns {Object} Returns `object`. + */ + function baseUpdate(object, path, updater, customizer) { + return baseSet(object, path, updater(baseGet(object, path)), customizer); + } + + /** + * The base implementation of methods like `_.dropWhile` and `_.takeWhile` + * without support for iteratee shorthands. + * + * @private + * @param {Array} array The array to query. + * @param {Function} predicate The function invoked per iteration. + * @param {boolean} [isDrop] Specify dropping elements instead of taking them. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Array} Returns the slice of `array`. + */ + function baseWhile(array, predicate, isDrop, fromRight) { + var length = array.length, + index = fromRight ? length : -1; + + while ((fromRight ? index-- : ++index < length) && + predicate(array[index], index, array)) {} + + return isDrop + ? baseSlice(array, (fromRight ? 0 : index), (fromRight ? index + 1 : length)) + : baseSlice(array, (fromRight ? index + 1 : 0), (fromRight ? length : index)); + } + + /** + * The base implementation of `wrapperValue` which returns the result of + * performing a sequence of actions on the unwrapped `value`, where each + * successive action is supplied the return value of the previous. + * + * @private + * @param {*} value The unwrapped value. + * @param {Array} actions Actions to perform to resolve the unwrapped value. + * @returns {*} Returns the resolved value. 
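+ * @example
+ *
+ * // A rough sketch: each action mirrors one deferred chained call, so the
+ * // replay below behaves like `_([1, 2, 3]).thru(_.reverse).value()`.
+ * baseWrapperValue([1, 2, 3], [{ 'func': _.reverse, 'thisArg': undefined, 'args': [] }]);
+ * // => [3, 2, 1]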
+ */
+ function baseWrapperValue(value, actions) {
+ var result = value;
+ if (result instanceof LazyWrapper) {
+ result = result.value();
+ }
+ return arrayReduce(actions, function(result, action) {
+ return action.func.apply(action.thisArg, arrayPush([result], action.args));
+ }, result);
+ }
+
+ /**
+ * The base implementation of methods like `_.xor`, without support for
+ * iteratee shorthands, that accepts an array of arrays to inspect.
+ *
+ * @private
+ * @param {Array} arrays The arrays to inspect.
+ * @param {Function} [iteratee] The iteratee invoked per element.
+ * @param {Function} [comparator] The comparator invoked per element.
+ * @returns {Array} Returns the new array of values.
+ */
+ function baseXor(arrays, iteratee, comparator) {
+ var length = arrays.length;
+ if (length < 2) {
+ return length ? baseUniq(arrays[0]) : [];
+ }
+ var index = -1,
+ result = Array(length);
+
+ while (++index < length) {
+ var array = arrays[index],
+ othIndex = -1;
+
+ while (++othIndex < length) {
+ if (othIndex != index) {
+ result[index] = baseDifference(result[index] || array, arrays[othIndex], iteratee, comparator);
+ }
+ }
+ }
+ return baseUniq(baseFlatten(result, 1), iteratee, comparator);
+ }
+
+ /**
+ * The base implementation of `_.zipObject` which assigns values using `assignFunc`.
+ *
+ * @private
+ * @param {Array} props The property identifiers.
+ * @param {Array} values The property values.
+ * @param {Function} assignFunc The function to assign values.
+ * @returns {Object} Returns the new object.
+ */
+ function baseZipObject(props, values, assignFunc) {
+ var index = -1,
+ length = props.length,
+ valsLength = values.length,
+ result = {};
+
+ while (++index < length) {
+ var value = index < valsLength ? values[index] : undefined;
+ assignFunc(result, props[index], value);
+ }
+ return result;
+ }
+
+ /**
+ * Casts `value` to an empty array if it's not an array like object.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {Array|Object} Returns the cast array-like object.
+ */
+ function castArrayLikeObject(value) {
+ return isArrayLikeObject(value) ? value : [];
+ }
+
+ /**
+ * Casts `value` to `identity` if it's not a function.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {Function} Returns cast function.
+ */
+ function castFunction(value) {
+ return typeof value == 'function' ? value : identity;
+ }
+
+ /**
+ * Casts `value` to a path array if it's not one.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @param {Object} [object] The object to query keys on.
+ * @returns {Array} Returns the cast property path array.
+ */
+ function castPath(value, object) {
+ if (isArray(value)) {
+ return value;
+ }
+ return isKey(value, object) ? [value] : stringToPath(toString(value));
+ }
+
+ /**
+ * A `baseRest` alias which can be replaced with `identity` by module
+ * replacement plugins.
+ *
+ * @private
+ * @type {Function}
+ * @param {Function} func The function to apply a rest parameter to.
+ * @returns {Function} Returns the new function.
+ */
+ var castRest = baseRest;
+
+ /**
+ * Casts `array` to a slice if it's needed.
+ *
+ * @private
+ * @param {Array} array The array to inspect.
+ * @param {number} start The start position.
+ * @param {number} [end=array.length] The end position.
+ * @returns {Array} Returns the cast slice.
+ */
+ function castSlice(array, start, end) {
+ var length = array.length;
+ end = end === undefined ? length : end;
+ return (!start && end >= length) ?
array : baseSlice(array, start, end); + } + + /** + * A simple wrapper around the global [`clearTimeout`](https://mdn.io/clearTimeout). + * + * @private + * @param {number|Object} id The timer id or timeout object of the timer to clear. + */ + var clearTimeout = ctxClearTimeout || function(id) { + return root.clearTimeout(id); + }; + + /** + * Creates a clone of `buffer`. + * + * @private + * @param {Buffer} buffer The buffer to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Buffer} Returns the cloned buffer. + */ + function cloneBuffer(buffer, isDeep) { + if (isDeep) { + return buffer.slice(); + } + var length = buffer.length, + result = allocUnsafe ? allocUnsafe(length) : new buffer.constructor(length); + + buffer.copy(result); + return result; + } + + /** + * Creates a clone of `arrayBuffer`. + * + * @private + * @param {ArrayBuffer} arrayBuffer The array buffer to clone. + * @returns {ArrayBuffer} Returns the cloned array buffer. + */ + function cloneArrayBuffer(arrayBuffer) { + var result = new arrayBuffer.constructor(arrayBuffer.byteLength); + new Uint8Array(result).set(new Uint8Array(arrayBuffer)); + return result; + } + + /** + * Creates a clone of `dataView`. + * + * @private + * @param {Object} dataView The data view to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the cloned data view. + */ + function cloneDataView(dataView, isDeep) { + var buffer = isDeep ? cloneArrayBuffer(dataView.buffer) : dataView.buffer; + return new dataView.constructor(buffer, dataView.byteOffset, dataView.byteLength); + } + + /** + * Creates a clone of `regexp`. + * + * @private + * @param {Object} regexp The regexp to clone. + * @returns {Object} Returns the cloned regexp. + */ + function cloneRegExp(regexp) { + var result = new regexp.constructor(regexp.source, reFlags.exec(regexp)); + result.lastIndex = regexp.lastIndex; + return result; + } + + /** + * Creates a clone of the `symbol` object. + * + * @private + * @param {Object} symbol The symbol object to clone. + * @returns {Object} Returns the cloned symbol object. + */ + function cloneSymbol(symbol) { + return symbolValueOf ? Object(symbolValueOf.call(symbol)) : {}; + } + + /** + * Creates a clone of `typedArray`. + * + * @private + * @param {Object} typedArray The typed array to clone. + * @param {boolean} [isDeep] Specify a deep clone. + * @returns {Object} Returns the cloned typed array. + */ + function cloneTypedArray(typedArray, isDeep) { + var buffer = isDeep ? cloneArrayBuffer(typedArray.buffer) : typedArray.buffer; + return new typedArray.constructor(buffer, typedArray.byteOffset, typedArray.length); + } + + /** + * Compares values to sort them in ascending order. + * + * @private + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {number} Returns the sort order indicator for `value`. 
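+ * @example
+ *
+ * // A rough sketch of the resulting order; `null`, `undefined`, and `NaN`
+ * // sort after other defined, reflexive values.
+ * [3, 1, null, 2].sort(compareAscending);
+ * // => [1, 2, 3, null]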
+ */
+ function compareAscending(value, other) {
+ if (value !== other) {
+ var valIsDefined = value !== undefined,
+ valIsNull = value === null,
+ valIsReflexive = value === value,
+ valIsSymbol = isSymbol(value);
+
+ var othIsDefined = other !== undefined,
+ othIsNull = other === null,
+ othIsReflexive = other === other,
+ othIsSymbol = isSymbol(other);
+
+ if ((!othIsNull && !othIsSymbol && !valIsSymbol && value > other) ||
+ (valIsSymbol && othIsDefined && othIsReflexive && !othIsNull && !othIsSymbol) ||
+ (valIsNull && othIsDefined && othIsReflexive) ||
+ (!valIsDefined && othIsReflexive) ||
+ !valIsReflexive) {
+ return 1;
+ }
+ if ((!valIsNull && !valIsSymbol && !othIsSymbol && value < other) ||
+ (othIsSymbol && valIsDefined && valIsReflexive && !valIsNull && !valIsSymbol) ||
+ (othIsNull && valIsDefined && valIsReflexive) ||
+ (!othIsDefined && valIsReflexive) ||
+ !othIsReflexive) {
+ return -1;
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * Used by `_.orderBy` to compare multiple properties of a value to another
+ * and stable sort them.
+ *
+ * If `orders` is unspecified, all values are sorted in ascending order. Otherwise,
+ * specify an order of "desc" for descending or "asc" for ascending sort order
+ * of corresponding values.
+ *
+ * @private
+ * @param {Object} object The object to compare.
+ * @param {Object} other The other object to compare.
+ * @param {boolean[]|string[]} orders The order to sort by for each property.
+ * @returns {number} Returns the sort order indicator for `object`.
+ */
+ function compareMultiple(object, other, orders) {
+ var index = -1,
+ objCriteria = object.criteria,
+ othCriteria = other.criteria,
+ length = objCriteria.length,
+ ordersLength = orders.length;
+
+ while (++index < length) {
+ var result = compareAscending(objCriteria[index], othCriteria[index]);
+ if (result) {
+ if (index >= ordersLength) {
+ return result;
+ }
+ var order = orders[index];
+ return result * (order == 'desc' ? -1 : 1);
+ }
+ }
+ // Fixes an `Array#sort` bug in the JS engine embedded in Adobe applications
+ // that causes it, under certain circumstances, to provide the same value for
+ // `object` and `other`. See https://github.com/jashkenas/underscore/pull/1247
+ // for more details.
+ //
+ // This also ensures a stable sort in V8 and other engines.
+ // See https://bugs.chromium.org/p/v8/issues/detail?id=90 for more details.
+ return object.index - other.index;
+ }
+
+ /**
+ * Creates an array that is the composition of partially applied arguments,
+ * placeholders, and provided arguments into a single array of arguments.
+ *
+ * @private
+ * @param {Array} args The provided arguments.
+ * @param {Array} partials The arguments to prepend to those provided.
+ * @param {Array} holders The `partials` placeholder indexes.
+ * @param {boolean} [isCurried] Specify composing for a curried function.
+ * @returns {Array} Returns the new array of composed arguments.
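+ * @example
+ *
+ * // A rough sketch (`PLACEHOLDER` stands in for the wrapper's placeholder
+ * // value): provided args fill the holder indexes, the rest are appended.
+ * composeArgs(['b', 'd'], ['a', PLACEHOLDER, 'c'], [1]);
+ * // => ['a', 'b', 'c', 'd']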
+ */
+ function composeArgs(args, partials, holders, isCurried) {
+ var argsIndex = -1,
+ argsLength = args.length,
+ holdersLength = holders.length,
+ leftIndex = -1,
+ leftLength = partials.length,
+ rangeLength = nativeMax(argsLength - holdersLength, 0),
+ result = Array(leftLength + rangeLength),
+ isUncurried = !isCurried;
+
+ while (++leftIndex < leftLength) {
+ result[leftIndex] = partials[leftIndex];
+ }
+ while (++argsIndex < holdersLength) {
+ if (isUncurried || argsIndex < argsLength) {
+ result[holders[argsIndex]] = args[argsIndex];
+ }
+ }
+ while (rangeLength--) {
+ result[leftIndex++] = args[argsIndex++];
+ }
+ return result;
+ }
+
+ /**
+ * This function is like `composeArgs` except that the arguments composition
+ * is tailored for `_.partialRight`.
+ *
+ * @private
+ * @param {Array} args The provided arguments.
+ * @param {Array} partials The arguments to append to those provided.
+ * @param {Array} holders The `partials` placeholder indexes.
+ * @param {boolean} [isCurried] Specify composing for a curried function.
+ * @returns {Array} Returns the new array of composed arguments.
+ */
+ function composeArgsRight(args, partials, holders, isCurried) {
+ var argsIndex = -1,
+ argsLength = args.length,
+ holdersIndex = -1,
+ holdersLength = holders.length,
+ rightIndex = -1,
+ rightLength = partials.length,
+ rangeLength = nativeMax(argsLength - holdersLength, 0),
+ result = Array(rangeLength + rightLength),
+ isUncurried = !isCurried;
+
+ while (++argsIndex < rangeLength) {
+ result[argsIndex] = args[argsIndex];
+ }
+ var offset = argsIndex;
+ while (++rightIndex < rightLength) {
+ result[offset + rightIndex] = partials[rightIndex];
+ }
+ while (++holdersIndex < holdersLength) {
+ if (isUncurried || argsIndex < argsLength) {
+ result[offset + holders[holdersIndex]] = args[argsIndex++];
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Copies the values of `source` to `array`.
+ *
+ * @private
+ * @param {Array} source The array to copy values from.
+ * @param {Array} [array=[]] The array to copy values to.
+ * @returns {Array} Returns `array`.
+ */
+ function copyArray(source, array) {
+ var index = -1,
+ length = source.length;
+
+ array || (array = Array(length));
+ while (++index < length) {
+ array[index] = source[index];
+ }
+ return array;
+ }
+
+ /**
+ * Copies properties of `source` to `object`.
+ *
+ * @private
+ * @param {Object} source The object to copy properties from.
+ * @param {Array} props The property identifiers to copy.
+ * @param {Object} [object={}] The object to copy properties to.
+ * @param {Function} [customizer] The function to customize copied values.
+ * @returns {Object} Returns `object`.
+ */
+ function copyObject(source, props, object, customizer) {
+ var isNew = !object;
+ object || (object = {});
+
+ var index = -1,
+ length = props.length;
+
+ while (++index < length) {
+ var key = props[index];
+
+ var newValue = customizer
+ ? customizer(object[key], source[key], key, object, source)
+ : undefined;
+
+ if (newValue === undefined) {
+ newValue = source[key];
+ }
+ if (isNew) {
+ baseAssignValue(object, key, newValue);
+ } else {
+ assignValue(object, key, newValue);
+ }
+ }
+ return object;
+ }
+
+ /**
+ * Copies own symbols of `source` to `object`.
+ *
+ * @private
+ * @param {Object} source The object to copy symbols from.
+ * @param {Object} [object={}] The object to copy symbols to.
+ * @returns {Object} Returns `object`.
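+ * @example
+ *
+ * // A rough sketch: only own enumerable symbol keys are copied.
+ * var key = Symbol('key'),
+ *     source = { 'a': 1 };
+ *
+ * source[key] = 2;
+ * copySymbols(source, {});
+ * // => { [key]: 2 } (the string-keyed `a` is not copied)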
+ */ + function copySymbols(source, object) { + return copyObject(source, getSymbols(source), object); + } + + /** + * Copies own and inherited symbols of `source` to `object`. + * + * @private + * @param {Object} source The object to copy symbols from. + * @param {Object} [object={}] The object to copy symbols to. + * @returns {Object} Returns `object`. + */ + function copySymbolsIn(source, object) { + return copyObject(source, getSymbolsIn(source), object); + } + + /** + * Creates a function like `_.groupBy`. + * + * @private + * @param {Function} setter The function to set accumulator values. + * @param {Function} [initializer] The accumulator object initializer. + * @returns {Function} Returns the new aggregator function. + */ + function createAggregator(setter, initializer) { + return function(collection, iteratee) { + var func = isArray(collection) ? arrayAggregator : baseAggregator, + accumulator = initializer ? initializer() : {}; + + return func(collection, setter, getIteratee(iteratee, 2), accumulator); + }; + } + + /** + * Creates a function like `_.assign`. + * + * @private + * @param {Function} assigner The function to assign values. + * @returns {Function} Returns the new assigner function. + */ + function createAssigner(assigner) { + return baseRest(function(object, sources) { + var index = -1, + length = sources.length, + customizer = length > 1 ? sources[length - 1] : undefined, + guard = length > 2 ? sources[2] : undefined; + + customizer = (assigner.length > 3 && typeof customizer == 'function') + ? (length--, customizer) + : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + customizer = length < 3 ? undefined : customizer; + length = 1; + } + object = Object(object); + while (++index < length) { + var source = sources[index]; + if (source) { + assigner(object, source, index, customizer); + } + } + return object; + }); + } + + /** + * Creates a `baseEach` or `baseEachRight` function. + * + * @private + * @param {Function} eachFunc The function to iterate over a collection. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ + function createBaseEach(eachFunc, fromRight) { + return function(collection, iteratee) { + if (collection == null) { + return collection; + } + if (!isArrayLike(collection)) { + return eachFunc(collection, iteratee); + } + var length = collection.length, + index = fromRight ? length : -1, + iterable = Object(collection); + + while ((fromRight ? index-- : ++index < length)) { + if (iteratee(iterable[index], index, iterable) === false) { + break; + } + } + return collection; + }; + } + + /** + * Creates a base function for methods like `_.forIn` and `_.forOwn`. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new base function. + */ + function createBaseFor(fromRight) { + return function(object, iteratee, keysFunc) { + var index = -1, + iterable = Object(object), + props = keysFunc(object), + length = props.length; + + while (length--) { + var key = props[fromRight ? length : ++index]; + if (iteratee(iterable[key], key, iterable) === false) { + break; + } + } + return object; + }; + } + + /** + * Creates a function that wraps `func` to invoke it with the optional `this` + * binding of `thisArg`. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. 
+ * @param {*} [thisArg] The `this` binding of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createBind(func, bitmask, thisArg) { + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); + + function wrapper() { + var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + return fn.apply(isBind ? thisArg : this, arguments); + } + return wrapper; + } + + /** + * Creates a function like `_.lowerFirst`. + * + * @private + * @param {string} methodName The name of the `String` case method to use. + * @returns {Function} Returns the new case function. + */ + function createCaseFirst(methodName) { + return function(string) { + string = toString(string); + + var strSymbols = hasUnicode(string) + ? stringToArray(string) + : undefined; + + var chr = strSymbols + ? strSymbols[0] + : string.charAt(0); + + var trailing = strSymbols + ? castSlice(strSymbols, 1).join('') + : string.slice(1); + + return chr[methodName]() + trailing; + }; + } + + /** + * Creates a function like `_.camelCase`. + * + * @private + * @param {Function} callback The function to combine each word. + * @returns {Function} Returns the new compounder function. + */ + function createCompounder(callback) { + return function(string) { + return arrayReduce(words(deburr(string).replace(reApos, '')), callback, ''); + }; + } + + /** + * Creates a function that produces an instance of `Ctor` regardless of + * whether it was invoked as part of a `new` expression or by `call` or `apply`. + * + * @private + * @param {Function} Ctor The constructor to wrap. + * @returns {Function} Returns the new wrapped function. + */ + function createCtor(Ctor) { + return function() { + // Use a `switch` statement to work with class constructors. See + // http://ecma-international.org/ecma-262/7.0/#sec-ecmascript-function-objects-call-thisargument-argumentslist + // for more details. + var args = arguments; + switch (args.length) { + case 0: return new Ctor; + case 1: return new Ctor(args[0]); + case 2: return new Ctor(args[0], args[1]); + case 3: return new Ctor(args[0], args[1], args[2]); + case 4: return new Ctor(args[0], args[1], args[2], args[3]); + case 5: return new Ctor(args[0], args[1], args[2], args[3], args[4]); + case 6: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5]); + case 7: return new Ctor(args[0], args[1], args[2], args[3], args[4], args[5], args[6]); + } + var thisBinding = baseCreate(Ctor.prototype), + result = Ctor.apply(thisBinding, args); + + // Mimic the constructor's `return` behavior. + // See https://es5.github.io/#x13.2.2 for more details. + return isObject(result) ? result : thisBinding; + }; + } + + /** + * Creates a function that wraps `func` to enable currying. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {number} arity The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createCurry(func, bitmask, arity) { + var Ctor = createCtor(func); + + function wrapper() { + var length = arguments.length, + args = Array(length), + index = length, + placeholder = getHolder(wrapper); + + while (index--) { + args[index] = arguments[index]; + } + var holders = (length < 3 && args[0] !== placeholder && args[length - 1] !== placeholder) + ? 
[] + : replaceHolders(args, placeholder); + + length -= holders.length; + if (length < arity) { + return createRecurry( + func, bitmask, createHybrid, wrapper.placeholder, undefined, + args, holders, undefined, undefined, arity - length); + } + var fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + return apply(fn, this, args); + } + return wrapper; + } + + /** + * Creates a `_.find` or `_.findLast` function. + * + * @private + * @param {Function} findIndexFunc The function to find the collection index. + * @returns {Function} Returns the new find function. + */ + function createFind(findIndexFunc) { + return function(collection, predicate, fromIndex) { + var iterable = Object(collection); + if (!isArrayLike(collection)) { + var iteratee = getIteratee(predicate, 3); + collection = keys(collection); + predicate = function(key) { return iteratee(iterable[key], key, iterable); }; + } + var index = findIndexFunc(collection, predicate, fromIndex); + return index > -1 ? iterable[iteratee ? collection[index] : index] : undefined; + }; + } + + /** + * Creates a `_.flow` or `_.flowRight` function. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new flow function. + */ + function createFlow(fromRight) { + return flatRest(function(funcs) { + var length = funcs.length, + index = length, + prereq = LodashWrapper.prototype.thru; + + if (fromRight) { + funcs.reverse(); + } + while (index--) { + var func = funcs[index]; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + if (prereq && !wrapper && getFuncName(func) == 'wrapper') { + var wrapper = new LodashWrapper([], true); + } + } + index = wrapper ? index : length; + while (++index < length) { + func = funcs[index]; + + var funcName = getFuncName(func), + data = funcName == 'wrapper' ? getData(func) : undefined; + + if (data && isLaziable(data[0]) && + data[1] == (WRAP_ARY_FLAG | WRAP_CURRY_FLAG | WRAP_PARTIAL_FLAG | WRAP_REARG_FLAG) && + !data[4].length && data[9] == 1 + ) { + wrapper = wrapper[getFuncName(data[0])].apply(wrapper, data[3]); + } else { + wrapper = (func.length == 1 && isLaziable(func)) + ? wrapper[funcName]() + : wrapper.thru(func); + } + } + return function() { + var args = arguments, + value = args[0]; + + if (wrapper && args.length == 1 && isArray(value)) { + return wrapper.plant(value).value(); + } + var index = 0, + result = length ? funcs[index].apply(this, args) : value; + + while (++index < length) { + result = funcs[index].call(this, result); + } + return result; + }; + }); + } + + /** + * Creates a function that wraps `func` to invoke it with optional `this` + * binding of `thisArg`, partial application, and currying. + * + * @private + * @param {Function|string} func The function or method name to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to prepend to those provided to + * the new function. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [partialsRight] The arguments to append to those provided + * to the new function. + * @param {Array} [holdersRight] The `partialsRight` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. 
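+ * @example
+ *
+ * // A rough direct-call sketch (normally reached via `createWrap`); with
+ * // only `WRAP_PARTIAL_FLAG` set this behaves like `_.partial`.
+ * var add2 = createHybrid(function(a, b) { return a + b; }, WRAP_PARTIAL_FLAG, undefined, [2], []);
+ * add2(3);
+ * // => 5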
+ */ + function createHybrid(func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, argPos, ary, arity) { + var isAry = bitmask & WRAP_ARY_FLAG, + isBind = bitmask & WRAP_BIND_FLAG, + isBindKey = bitmask & WRAP_BIND_KEY_FLAG, + isCurried = bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG), + isFlip = bitmask & WRAP_FLIP_FLAG, + Ctor = isBindKey ? undefined : createCtor(func); + + function wrapper() { + var length = arguments.length, + args = Array(length), + index = length; + + while (index--) { + args[index] = arguments[index]; + } + if (isCurried) { + var placeholder = getHolder(wrapper), + holdersCount = countHolders(args, placeholder); + } + if (partials) { + args = composeArgs(args, partials, holders, isCurried); + } + if (partialsRight) { + args = composeArgsRight(args, partialsRight, holdersRight, isCurried); + } + length -= holdersCount; + if (isCurried && length < arity) { + var newHolders = replaceHolders(args, placeholder); + return createRecurry( + func, bitmask, createHybrid, wrapper.placeholder, thisArg, + args, newHolders, argPos, ary, arity - length + ); + } + var thisBinding = isBind ? thisArg : this, + fn = isBindKey ? thisBinding[func] : func; + + length = args.length; + if (argPos) { + args = reorder(args, argPos); + } else if (isFlip && length > 1) { + args.reverse(); + } + if (isAry && ary < length) { + args.length = ary; + } + if (this && this !== root && this instanceof wrapper) { + fn = Ctor || createCtor(fn); + } + return fn.apply(thisBinding, args); + } + return wrapper; + } + + /** + * Creates a function like `_.invertBy`. + * + * @private + * @param {Function} setter The function to set accumulator values. + * @param {Function} toIteratee The function to resolve iteratees. + * @returns {Function} Returns the new inverter function. + */ + function createInverter(setter, toIteratee) { + return function(object, iteratee) { + return baseInverter(object, setter, toIteratee(iteratee), {}); + }; + } + + /** + * Creates a function that performs a mathematical operation on two values. + * + * @private + * @param {Function} operator The function to perform the operation. + * @param {number} [defaultValue] The value used for `undefined` arguments. + * @returns {Function} Returns the new mathematical operation function. + */ + function createMathOperation(operator, defaultValue) { + return function(value, other) { + var result; + if (value === undefined && other === undefined) { + return defaultValue; + } + if (value !== undefined) { + result = value; + } + if (other !== undefined) { + if (result === undefined) { + return other; + } + if (typeof value == 'string' || typeof other == 'string') { + value = baseToString(value); + other = baseToString(other); + } else { + value = baseToNumber(value); + other = baseToNumber(other); + } + result = operator(value, other); + } + return result; + }; + } + + /** + * Creates a function like `_.over`. + * + * @private + * @param {Function} arrayFunc The function to iterate over iteratees. + * @returns {Function} Returns the new over function. + */ + function createOver(arrayFunc) { + return flatRest(function(iteratees) { + iteratees = arrayMap(iteratees, baseUnary(getIteratee())); + return baseRest(function(args) { + var thisArg = this; + return arrayFunc(iteratees, function(iteratee) { + return apply(iteratee, thisArg, args); + }); + }); + }); + } + + /** + * Creates the padding for `string` based on `length`. The `chars` string + * is truncated if the number of characters exceeds `length`. 
+ * + * @private + * @param {number} length The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padding for `string`. + */ + function createPadding(length, chars) { + chars = chars === undefined ? ' ' : baseToString(chars); + + var charsLength = chars.length; + if (charsLength < 2) { + return charsLength ? baseRepeat(chars, length) : chars; + } + var result = baseRepeat(chars, nativeCeil(length / stringSize(chars))); + return hasUnicode(chars) + ? castSlice(stringToArray(result), 0, length).join('') + : result.slice(0, length); + } + + /** + * Creates a function that wraps `func` to invoke it with the `this` binding + * of `thisArg` and `partials` prepended to the arguments it receives. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {*} thisArg The `this` binding of `func`. + * @param {Array} partials The arguments to prepend to those provided to + * the new function. + * @returns {Function} Returns the new wrapped function. + */ + function createPartial(func, bitmask, thisArg, partials) { + var isBind = bitmask & WRAP_BIND_FLAG, + Ctor = createCtor(func); + + function wrapper() { + var argsIndex = -1, + argsLength = arguments.length, + leftIndex = -1, + leftLength = partials.length, + args = Array(leftLength + argsLength), + fn = (this && this !== root && this instanceof wrapper) ? Ctor : func; + + while (++leftIndex < leftLength) { + args[leftIndex] = partials[leftIndex]; + } + while (argsLength--) { + args[leftIndex++] = arguments[++argsIndex]; + } + return apply(fn, isBind ? thisArg : this, args); + } + return wrapper; + } + + /** + * Creates a `_.range` or `_.rangeRight` function. + * + * @private + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {Function} Returns the new range function. + */ + function createRange(fromRight) { + return function(start, end, step) { + if (step && typeof step != 'number' && isIterateeCall(start, end, step)) { + end = step = undefined; + } + // Ensure the sign of `-0` is preserved. + start = toFinite(start); + if (end === undefined) { + end = start; + start = 0; + } else { + end = toFinite(end); + } + step = step === undefined ? (start < end ? 1 : -1) : toFinite(step); + return baseRange(start, end, step, fromRight); + }; + } + + /** + * Creates a function that performs a relational operation on two values. + * + * @private + * @param {Function} operator The function to perform the operation. + * @returns {Function} Returns the new relational operation function. + */ + function createRelationalOperation(operator) { + return function(value, other) { + if (!(typeof value == 'string' && typeof other == 'string')) { + value = toNumber(value); + other = toNumber(other); + } + return operator(value, other); + }; + } + + /** + * Creates a function that wraps `func` to continue currying. + * + * @private + * @param {Function} func The function to wrap. + * @param {number} bitmask The bitmask flags. See `createWrap` for more details. + * @param {Function} wrapFunc The function to create the `func` wrapper. + * @param {*} placeholder The placeholder value. + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to prepend to those provided to + * the new function. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. 
+ * @param {number} [ary] The arity cap of `func`. + * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createRecurry(func, bitmask, wrapFunc, placeholder, thisArg, partials, holders, argPos, ary, arity) { + var isCurry = bitmask & WRAP_CURRY_FLAG, + newHolders = isCurry ? holders : undefined, + newHoldersRight = isCurry ? undefined : holders, + newPartials = isCurry ? partials : undefined, + newPartialsRight = isCurry ? undefined : partials; + + bitmask |= (isCurry ? WRAP_PARTIAL_FLAG : WRAP_PARTIAL_RIGHT_FLAG); + bitmask &= ~(isCurry ? WRAP_PARTIAL_RIGHT_FLAG : WRAP_PARTIAL_FLAG); + + if (!(bitmask & WRAP_CURRY_BOUND_FLAG)) { + bitmask &= ~(WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG); + } + var newData = [ + func, bitmask, thisArg, newPartials, newHolders, newPartialsRight, + newHoldersRight, argPos, ary, arity + ]; + + var result = wrapFunc.apply(undefined, newData); + if (isLaziable(func)) { + setData(result, newData); + } + result.placeholder = placeholder; + return setWrapToString(result, func, bitmask); + } + + /** + * Creates a function like `_.round`. + * + * @private + * @param {string} methodName The name of the `Math` method to use when rounding. + * @returns {Function} Returns the new round function. + */ + function createRound(methodName) { + var func = Math[methodName]; + return function(number, precision) { + number = toNumber(number); + precision = precision == null ? 0 : nativeMin(toInteger(precision), 292); + if (precision && nativeIsFinite(number)) { + // Shift with exponential notation to avoid floating-point issues. + // See [MDN](https://mdn.io/round#Examples) for more details. + var pair = (toString(number) + 'e').split('e'), + value = func(pair[0] + 'e' + (+pair[1] + precision)); + + pair = (toString(value) + 'e').split('e'); + return +(pair[0] + 'e' + (+pair[1] - precision)); + } + return func(number); + }; + } + + /** + * Creates a set object of `values`. + * + * @private + * @param {Array} values The values to add to the set. + * @returns {Object} Returns the new set. + */ + var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { + return new Set(values); + }; + + /** + * Creates a `_.toPairs` or `_.toPairsIn` function. + * + * @private + * @param {Function} keysFunc The function to get the keys of a given object. + * @returns {Function} Returns the new pairs function. + */ + function createToPairs(keysFunc) { + return function(object) { + var tag = getTag(object); + if (tag == mapTag) { + return mapToArray(object); + } + if (tag == setTag) { + return setToPairs(object); + } + return baseToPairs(object, keysFunc(object)); + }; + } + + /** + * Creates a function that either curries or invokes `func` with optional + * `this` binding and partially applied arguments. + * + * @private + * @param {Function|string} func The function or method name to wrap. + * @param {number} bitmask The bitmask flags. + * 1 - `_.bind` + * 2 - `_.bindKey` + * 4 - `_.curry` or `_.curryRight` of a bound function + * 8 - `_.curry` + * 16 - `_.curryRight` + * 32 - `_.partial` + * 64 - `_.partialRight` + * 128 - `_.rearg` + * 256 - `_.ary` + * 512 - `_.flip` + * @param {*} [thisArg] The `this` binding of `func`. + * @param {Array} [partials] The arguments to be partially applied. + * @param {Array} [holders] The `partials` placeholder indexes. + * @param {Array} [argPos] The argument positions of the new function. + * @param {number} [ary] The arity cap of `func`. 
+ * @param {number} [arity] The arity of `func`. + * @returns {Function} Returns the new wrapped function. + */ + function createWrap(func, bitmask, thisArg, partials, holders, argPos, ary, arity) { + var isBindKey = bitmask & WRAP_BIND_KEY_FLAG; + if (!isBindKey && typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + var length = partials ? partials.length : 0; + if (!length) { + bitmask &= ~(WRAP_PARTIAL_FLAG | WRAP_PARTIAL_RIGHT_FLAG); + partials = holders = undefined; + } + ary = ary === undefined ? ary : nativeMax(toInteger(ary), 0); + arity = arity === undefined ? arity : toInteger(arity); + length -= holders ? holders.length : 0; + + if (bitmask & WRAP_PARTIAL_RIGHT_FLAG) { + var partialsRight = partials, + holdersRight = holders; + + partials = holders = undefined; + } + var data = isBindKey ? undefined : getData(func); + + var newData = [ + func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, + argPos, ary, arity + ]; + + if (data) { + mergeData(newData, data); + } + func = newData[0]; + bitmask = newData[1]; + thisArg = newData[2]; + partials = newData[3]; + holders = newData[4]; + arity = newData[9] = newData[9] === undefined + ? (isBindKey ? 0 : func.length) + : nativeMax(newData[9] - length, 0); + + if (!arity && bitmask & (WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG)) { + bitmask &= ~(WRAP_CURRY_FLAG | WRAP_CURRY_RIGHT_FLAG); + } + if (!bitmask || bitmask == WRAP_BIND_FLAG) { + var result = createBind(func, bitmask, thisArg); + } else if (bitmask == WRAP_CURRY_FLAG || bitmask == WRAP_CURRY_RIGHT_FLAG) { + result = createCurry(func, bitmask, arity); + } else if ((bitmask == WRAP_PARTIAL_FLAG || bitmask == (WRAP_BIND_FLAG | WRAP_PARTIAL_FLAG)) && !holders.length) { + result = createPartial(func, bitmask, thisArg, partials); + } else { + result = createHybrid.apply(undefined, newData); + } + var setter = data ? baseSetData : setData; + return setWrapToString(setter(result, newData), func, bitmask); + } + + /** + * Used by `_.defaults` to customize its `_.assignIn` use to assign properties + * of source objects to the destination object for all destination properties + * that resolve to `undefined`. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to assign. + * @param {Object} object The parent object of `objValue`. + * @returns {*} Returns the value to assign. + */ + function customDefaultsAssignIn(objValue, srcValue, key, object) { + if (objValue === undefined || + (eq(objValue, objectProto[key]) && !hasOwnProperty.call(object, key))) { + return srcValue; + } + return objValue; + } + + /** + * Used by `_.defaultsDeep` to customize its `_.merge` use to merge source + * objects into destination objects that are passed thru. + * + * @private + * @param {*} objValue The destination value. + * @param {*} srcValue The source value. + * @param {string} key The key of the property to merge. + * @param {Object} object The parent object of `objValue`. + * @param {Object} source The parent object of `srcValue`. + * @param {Object} [stack] Tracks traversed source values and their merged + * counterparts. + * @returns {*} Returns the value to assign. + */ + function customDefaultsMerge(objValue, srcValue, key, object, source, stack) { + if (isObject(objValue) && isObject(srcValue)) { + // Recursively merge objects and arrays (susceptible to call stack limits). 
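+ // The `stack` pairing below also guards against cycles: if `srcValue` is
+ // revisited while this merge is still in progress, `baseMerge` resolves it
+ // to the partially merged `objValue` instead of recursing forever.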
+ stack.set(srcValue, objValue); + baseMerge(objValue, srcValue, undefined, customDefaultsMerge, stack); + stack['delete'](srcValue); + } + return objValue; + } + + /** + * Used by `_.omit` to customize its `_.cloneDeep` use to only clone plain + * objects. + * + * @private + * @param {*} value The value to inspect. + * @param {string} key The key of the property to inspect. + * @returns {*} Returns the uncloned value or `undefined` to defer cloning to `_.cloneDeep`. + */ + function customOmitClone(value) { + return isPlainObject(value) ? undefined : value; + } + + /** + * A specialized version of `baseIsEqualDeep` for arrays with support for + * partial deep comparisons. + * + * @private + * @param {Array} array The array to compare. + * @param {Array} other The other array to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `array` and `other` objects. + * @returns {boolean} Returns `true` if the arrays are equivalent, else `false`. + */ + function equalArrays(array, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + arrLength = array.length, + othLength = other.length; + + if (arrLength != othLength && !(isPartial && othLength > arrLength)) { + return false; + } + // Check that cyclic values are equal. + var arrStacked = stack.get(array); + var othStacked = stack.get(other); + if (arrStacked && othStacked) { + return arrStacked == other && othStacked == array; + } + var index = -1, + result = true, + seen = (bitmask & COMPARE_UNORDERED_FLAG) ? new SetCache : undefined; + + stack.set(array, other); + stack.set(other, array); + + // Ignore non-index properties. + while (++index < arrLength) { + var arrValue = array[index], + othValue = other[index]; + + if (customizer) { + var compared = isPartial + ? customizer(othValue, arrValue, index, other, array, stack) + : customizer(arrValue, othValue, index, array, other, stack); + } + if (compared !== undefined) { + if (compared) { + continue; + } + result = false; + break; + } + // Recursively compare arrays (susceptible to call stack limits). + if (seen) { + if (!arraySome(other, function(othValue, othIndex) { + if (!cacheHas(seen, othIndex) && + (arrValue === othValue || equalFunc(arrValue, othValue, bitmask, customizer, stack))) { + return seen.push(othIndex); + } + })) { + result = false; + break; + } + } else if (!( + arrValue === othValue || + equalFunc(arrValue, othValue, bitmask, customizer, stack) + )) { + result = false; + break; + } + } + stack['delete'](array); + stack['delete'](other); + return result; + } + + /** + * A specialized version of `baseIsEqualDeep` for comparing objects of + * the same `toStringTag`. + * + * **Note:** This function only supports comparing values with tags of + * `Boolean`, `Date`, `Error`, `Number`, `RegExp`, or `String`. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {string} tag The `toStringTag` of the objects to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. 
+ * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function equalByTag(object, other, tag, bitmask, customizer, equalFunc, stack) { + switch (tag) { + case dataViewTag: + if ((object.byteLength != other.byteLength) || + (object.byteOffset != other.byteOffset)) { + return false; + } + object = object.buffer; + other = other.buffer; + + case arrayBufferTag: + if ((object.byteLength != other.byteLength) || + !equalFunc(new Uint8Array(object), new Uint8Array(other))) { + return false; + } + return true; + + case boolTag: + case dateTag: + case numberTag: + // Coerce booleans to `1` or `0` and dates to milliseconds. + // Invalid dates are coerced to `NaN`. + return eq(+object, +other); + + case errorTag: + return object.name == other.name && object.message == other.message; + + case regexpTag: + case stringTag: + // Coerce regexes to strings and treat strings, primitives and objects, + // as equal. See http://www.ecma-international.org/ecma-262/7.0/#sec-regexp.prototype.tostring + // for more details. + return object == (other + ''); + + case mapTag: + var convert = mapToArray; + + case setTag: + var isPartial = bitmask & COMPARE_PARTIAL_FLAG; + convert || (convert = setToArray); + + if (object.size != other.size && !isPartial) { + return false; + } + // Assume cyclic values are equal. + var stacked = stack.get(object); + if (stacked) { + return stacked == other; + } + bitmask |= COMPARE_UNORDERED_FLAG; + + // Recursively compare objects (susceptible to call stack limits). + stack.set(object, other); + var result = equalArrays(convert(object), convert(other), bitmask, customizer, equalFunc, stack); + stack['delete'](object); + return result; + + case symbolTag: + if (symbolValueOf) { + return symbolValueOf.call(object) == symbolValueOf.call(other); + } + } + return false; + } + + /** + * A specialized version of `baseIsEqualDeep` for objects with support for + * partial deep comparisons. + * + * @private + * @param {Object} object The object to compare. + * @param {Object} other The other object to compare. + * @param {number} bitmask The bitmask flags. See `baseIsEqual` for more details. + * @param {Function} customizer The function to customize comparisons. + * @param {Function} equalFunc The function to determine equivalents of values. + * @param {Object} stack Tracks traversed `object` and `other` objects. + * @returns {boolean} Returns `true` if the objects are equivalent, else `false`. + */ + function equalObjects(object, other, bitmask, customizer, equalFunc, stack) { + var isPartial = bitmask & COMPARE_PARTIAL_FLAG, + objProps = getAllKeys(object), + objLength = objProps.length, + othProps = getAllKeys(other), + othLength = othProps.length; + + if (objLength != othLength && !isPartial) { + return false; + } + var index = objLength; + while (index--) { + var key = objProps[index]; + if (!(isPartial ? key in other : hasOwnProperty.call(other, key))) { + return false; + } + } + // Check that cyclic values are equal. + var objStacked = stack.get(object); + var othStacked = stack.get(other); + if (objStacked && othStacked) { + return objStacked == other && othStacked == object; + } + var result = true; + stack.set(object, other); + stack.set(other, object); + + var skipCtor = isPartial; + while (++index < objLength) { + key = objProps[index]; + var objValue = object[key], + othValue = other[key]; + + if (customizer) { + var compared = isPartial + ? 
customizer(othValue, objValue, key, other, object, stack) + : customizer(objValue, othValue, key, object, other, stack); + } + // Recursively compare objects (susceptible to call stack limits). + if (!(compared === undefined + ? (objValue === othValue || equalFunc(objValue, othValue, bitmask, customizer, stack)) + : compared + )) { + result = false; + break; + } + skipCtor || (skipCtor = key == 'constructor'); + } + if (result && !skipCtor) { + var objCtor = object.constructor, + othCtor = other.constructor; + + // Non `Object` object instances with different constructors are not equal. + if (objCtor != othCtor && + ('constructor' in object && 'constructor' in other) && + !(typeof objCtor == 'function' && objCtor instanceof objCtor && + typeof othCtor == 'function' && othCtor instanceof othCtor)) { + result = false; + } + } + stack['delete'](object); + stack['delete'](other); + return result; + } + + /** + * A specialized version of `baseRest` which flattens the rest array. + * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @returns {Function} Returns the new function. + */ + function flatRest(func) { + return setToString(overRest(func, undefined, flatten), func + ''); + } + + /** + * Creates an array of own enumerable property names and symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names and symbols. + */ + function getAllKeys(object) { + return baseGetAllKeys(object, keys, getSymbols); + } + + /** + * Creates an array of own and inherited enumerable property names and + * symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names and symbols. + */ + function getAllKeysIn(object) { + return baseGetAllKeys(object, keysIn, getSymbolsIn); + } + + /** + * Gets metadata for `func`. + * + * @private + * @param {Function} func The function to query. + * @returns {*} Returns the metadata for `func`. + */ + var getData = !metaMap ? noop : function(func) { + return metaMap.get(func); + }; + + /** + * Gets the name of `func`. + * + * @private + * @param {Function} func The function to query. + * @returns {string} Returns the function name. + */ + function getFuncName(func) { + var result = (func.name + ''), + array = realNames[result], + length = hasOwnProperty.call(realNames, result) ? array.length : 0; + + while (length--) { + var data = array[length], + otherFunc = data.func; + if (otherFunc == null || otherFunc == func) { + return data.name; + } + } + return result; + } + + /** + * Gets the argument placeholder value for `func`. + * + * @private + * @param {Function} func The function to inspect. + * @returns {*} Returns the placeholder value. + */ + function getHolder(func) { + var object = hasOwnProperty.call(lodash, 'placeholder') ? lodash : func; + return object.placeholder; + } + + /** + * Gets the appropriate "iteratee" function. If `_.iteratee` is customized, + * this function returns the custom method, otherwise it returns `baseIteratee`. + * If arguments are provided, the chosen function is invoked with them and + * its result is returned. + * + * @private + * @param {*} [value] The value to convert to an iteratee. + * @param {number} [arity] The arity of the created iteratee. + * @returns {Function} Returns the chosen function or its result. + */ + function getIteratee() { + var result = lodash.iteratee || iteratee; + result = result === iteratee ? 
baseIteratee : result; + return arguments.length ? result(arguments[0], arguments[1]) : result; + } + + /** + * Gets the data for `map`. + * + * @private + * @param {Object} map The map to query. + * @param {string} key The reference key. + * @returns {*} Returns the map data. + */ + function getMapData(map, key) { + var data = map.__data__; + return isKeyable(key) + ? data[typeof key == 'string' ? 'string' : 'hash'] + : data.map; + } + + /** + * Gets the property names, values, and compare flags of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the match data of `object`. + */ + function getMatchData(object) { + var result = keys(object), + length = result.length; + + while (length--) { + var key = result[length], + value = object[key]; + + result[length] = [key, value, isStrictComparable(value)]; + } + return result; + } + + /** + * Gets the native function at `key` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the method to get. + * @returns {*} Returns the function if it's native, else `undefined`. + */ + function getNative(object, key) { + var value = getValue(object, key); + return baseIsNative(value) ? value : undefined; + } + + /** + * A specialized version of `baseGetTag` which ignores `Symbol.toStringTag` values. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the raw `toStringTag`. + */ + function getRawTag(value) { + var isOwn = hasOwnProperty.call(value, symToStringTag), + tag = value[symToStringTag]; + + try { + value[symToStringTag] = undefined; + var unmasked = true; + } catch (e) {} + + var result = nativeObjectToString.call(value); + if (unmasked) { + if (isOwn) { + value[symToStringTag] = tag; + } else { + delete value[symToStringTag]; + } + } + return result; + } + + /** + * Creates an array of the own enumerable symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of symbols. + */ + var getSymbols = !nativeGetSymbols ? stubArray : function(object) { + if (object == null) { + return []; + } + object = Object(object); + return arrayFilter(nativeGetSymbols(object), function(symbol) { + return propertyIsEnumerable.call(object, symbol); + }); + }; + + /** + * Creates an array of the own and inherited enumerable symbols of `object`. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of symbols. + */ + var getSymbolsIn = !nativeGetSymbols ? stubArray : function(object) { + var result = []; + while (object) { + arrayPush(result, getSymbols(object)); + object = getPrototype(object); + } + return result; + }; + + /** + * Gets the `toStringTag` of `value`. + * + * @private + * @param {*} value The value to query. + * @returns {string} Returns the `toStringTag`. + */ + var getTag = baseGetTag; + + // Fallback for data views, maps, sets, and weak maps in IE 11 and promises in Node.js < 6. + if ((DataView && getTag(new DataView(new ArrayBuffer(1))) != dataViewTag) || + (Map && getTag(new Map) != mapTag) || + (Promise && getTag(Promise.resolve()) != promiseTag) || + (Set && getTag(new Set) != setTag) || + (WeakMap && getTag(new WeakMap) != weakMapTag)) { + getTag = function(value) { + var result = baseGetTag(value), + Ctor = result == objectTag ? value.constructor : undefined, + ctorString = Ctor ? 
toSource(Ctor) : ''; + + if (ctorString) { + switch (ctorString) { + case dataViewCtorString: return dataViewTag; + case mapCtorString: return mapTag; + case promiseCtorString: return promiseTag; + case setCtorString: return setTag; + case weakMapCtorString: return weakMapTag; + } + } + return result; + }; + } + + /** + * Gets the view, applying any `transforms` to the `start` and `end` positions. + * + * @private + * @param {number} start The start of the view. + * @param {number} end The end of the view. + * @param {Array} transforms The transformations to apply to the view. + * @returns {Object} Returns an object containing the `start` and `end` + * positions of the view. + */ + function getView(start, end, transforms) { + var index = -1, + length = transforms.length; + + while (++index < length) { + var data = transforms[index], + size = data.size; + + switch (data.type) { + case 'drop': start += size; break; + case 'dropRight': end -= size; break; + case 'take': end = nativeMin(end, start + size); break; + case 'takeRight': start = nativeMax(start, end - size); break; + } + } + return { 'start': start, 'end': end }; + } + + /** + * Extracts wrapper details from the `source` body comment. + * + * @private + * @param {string} source The source to inspect. + * @returns {Array} Returns the wrapper details. + */ + function getWrapDetails(source) { + var match = source.match(reWrapDetails); + return match ? match[1].split(reSplitDetails) : []; + } + + /** + * Checks if `path` exists on `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @param {Function} hasFunc The function to check properties. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + */ + function hasPath(object, path, hasFunc) { + path = castPath(path, object); + + var index = -1, + length = path.length, + result = false; + + while (++index < length) { + var key = toKey(path[index]); + if (!(result = object != null && hasFunc(object, key))) { + break; + } + object = object[key]; + } + if (result || ++index != length) { + return result; + } + length = object == null ? 0 : object.length; + return !!length && isLength(length) && isIndex(key, length) && + (isArray(object) || isArguments(object)); + } + + /** + * Initializes an array clone. + * + * @private + * @param {Array} array The array to clone. + * @returns {Array} Returns the initialized clone. + */ + function initCloneArray(array) { + var length = array.length, + result = new array.constructor(length); + + // Add properties assigned by `RegExp#exec`. + if (length && typeof array[0] == 'string' && hasOwnProperty.call(array, 'index')) { + result.index = array.index; + result.input = array.input; + } + return result; + } + + /** + * Initializes an object clone. + * + * @private + * @param {Object} object The object to clone. + * @returns {Object} Returns the initialized clone. + */ + function initCloneObject(object) { + return (typeof object.constructor == 'function' && !isPrototype(object)) + ? baseCreate(getPrototype(object)) + : {}; + } + + /** + * Initializes an object clone based on its `toStringTag`. + * + * **Note:** This function only supports cloning values with tags of + * `Boolean`, `Date`, `Error`, `Map`, `Number`, `RegExp`, `Set`, or `String`. + * + * @private + * @param {Object} object The object to clone. + * @param {string} tag The `toStringTag` of the object to clone. + * @param {boolean} [isDeep] Specify a deep clone. 
+ * @returns {Object} Returns the initialized clone.
+ */
+ function initCloneByTag(object, tag, isDeep) {
+ var Ctor = object.constructor;
+ switch (tag) {
+ case arrayBufferTag:
+ return cloneArrayBuffer(object);
+
+ case boolTag:
+ case dateTag:
+ return new Ctor(+object);
+
+ case dataViewTag:
+ return cloneDataView(object, isDeep);
+
+ case float32Tag: case float64Tag:
+ case int8Tag: case int16Tag: case int32Tag:
+ case uint8Tag: case uint8ClampedTag: case uint16Tag: case uint32Tag:
+ return cloneTypedArray(object, isDeep);
+
+ case mapTag:
+ return new Ctor;
+
+ case numberTag:
+ case stringTag:
+ return new Ctor(object);
+
+ case regexpTag:
+ return cloneRegExp(object);
+
+ case setTag:
+ return new Ctor;
+
+ case symbolTag:
+ return cloneSymbol(object);
+ }
+ }
+
+ /**
+ * Inserts wrapper `details` in a comment at the top of the `source` body.
+ *
+ * @private
+ * @param {string} source The source to modify.
+ * @param {Array} details The details to insert.
+ * @returns {string} Returns the modified source.
+ */
+ function insertWrapDetails(source, details) {
+ var length = details.length;
+ if (!length) {
+ return source;
+ }
+ var lastIndex = length - 1;
+ details[lastIndex] = (length > 1 ? '& ' : '') + details[lastIndex];
+ details = details.join(length > 2 ? ', ' : ' ');
+ return source.replace(reWrapComment, '{\n/* [wrapped with ' + details + '] */\n');
+ }
+
+ /**
+ * Checks if `value` is a flattenable `arguments` object or array.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is flattenable, else `false`.
+ */
+ function isFlattenable(value) {
+ return isArray(value) || isArguments(value) ||
+ !!(spreadableSymbol && value && value[spreadableSymbol]);
+ }
+
+ /**
+ * Checks if `value` is a valid array-like index.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index.
+ * @returns {boolean} Returns `true` if `value` is a valid index, else `false`.
+ */
+ function isIndex(value, length) {
+ var type = typeof value;
+ length = length == null ? MAX_SAFE_INTEGER : length;
+
+ return !!length &&
+ (type == 'number' ||
+ (type != 'symbol' && reIsUint.test(value))) &&
+ (value > -1 && value % 1 == 0 && value < length);
+ }
+
+ /**
+ * Checks if the given arguments are from an iteratee call.
+ *
+ * @private
+ * @param {*} value The potential iteratee value argument.
+ * @param {*} index The potential iteratee index or key argument.
+ * @param {*} object The potential iteratee object argument.
+ * @returns {boolean} Returns `true` if the arguments are from an iteratee call,
+ * else `false`.
+ */
+ function isIterateeCall(value, index, object) {
+ if (!isObject(object)) {
+ return false;
+ }
+ var type = typeof index;
+ if (type == 'number'
+ ? (isArrayLike(object) && isIndex(index, object.length))
+ : (type == 'string' && index in object)
+ ) {
+ return eq(object[index], value);
+ }
+ return false;
+ }
+
+ /**
+ * Checks if `value` is a property name and not a property path.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @param {Object} [object] The object to query keys on.
+ * @returns {boolean} Returns `true` if `value` is a property name, else `false`.
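+ *
+ * Editorial note (not part of upstream lodash), a rough sketch of the
+ * distinction this check draws:
+ *
+ *   isKey('a');                 // => true  (plain property name)
+ *   isKey('a.b');               // => false (parsed as a deep path)
+ *   isKey('a.b', { 'a.b': 1 }); // => true  (the literal key exists)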
+ */
+ function isKey(value, object) {
+ if (isArray(value)) {
+ return false;
+ }
+ var type = typeof value;
+ if (type == 'number' || type == 'symbol' || type == 'boolean' ||
+ value == null || isSymbol(value)) {
+ return true;
+ }
+ return reIsPlainProp.test(value) || !reIsDeepProp.test(value) ||
+ (object != null && value in Object(object));
+ }
+
+ /**
+ * Checks if `value` is suitable for use as unique object key.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is suitable, else `false`.
+ */
+ function isKeyable(value) {
+ var type = typeof value;
+ return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
+ ? (value !== '__proto__')
+ : (value === null);
+ }
+
+ /**
+ * Checks if `func` has a lazy counterpart.
+ *
+ * @private
+ * @param {Function} func The function to check.
+ * @returns {boolean} Returns `true` if `func` has a lazy counterpart,
+ * else `false`.
+ */
+ function isLaziable(func) {
+ var funcName = getFuncName(func),
+ other = lodash[funcName];
+
+ if (typeof other != 'function' || !(funcName in LazyWrapper.prototype)) {
+ return false;
+ }
+ if (func === other) {
+ return true;
+ }
+ var data = getData(other);
+ return !!data && func === data[0];
+ }
+
+ /**
+ * Checks if `func` has its source masked.
+ *
+ * @private
+ * @param {Function} func The function to check.
+ * @returns {boolean} Returns `true` if `func` is masked, else `false`.
+ */
+ function isMasked(func) {
+ return !!maskSrcKey && (maskSrcKey in func);
+ }
+
+ /**
+ * Checks if `func` is capable of being masked.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `func` is maskable, else `false`.
+ */
+ var isMaskable = coreJsData ? isFunction : stubFalse;
+
+ /**
+ * Checks if `value` is likely a prototype object.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a prototype, else `false`.
+ */
+ function isPrototype(value) {
+ var Ctor = value && value.constructor,
+ proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto;
+
+ return value === proto;
+ }
+
+ /**
+ * Checks if `value` is suitable for strict equality comparisons, i.e. `===`.
+ *
+ * @private
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is suitable for strict
+ * equality comparisons, else `false`.
+ */
+ function isStrictComparable(value) {
+ return value === value && !isObject(value);
+ }
+
+ /**
+ * A specialized version of `matchesProperty` for source values suitable
+ * for strict equality comparisons, i.e. `===`.
+ *
+ * @private
+ * @param {string} key The key of the property to get.
+ * @param {*} srcValue The value to match.
+ * @returns {Function} Returns the new spec function.
+ */
+ function matchesStrictComparable(key, srcValue) {
+ return function(object) {
+ if (object == null) {
+ return false;
+ }
+ return object[key] === srcValue &&
+ (srcValue !== undefined || (key in Object(object)));
+ };
+ }
+
+ /**
+ * A specialized version of `_.memoize` which clears the memoized function's
+ * cache when it exceeds `MAX_MEMOIZE_SIZE`.
+ *
+ * @private
+ * @param {Function} func The function to have its output memoized.
+ * @returns {Function} Returns the new memoized function.
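+ *
+ * Editorial note (not part of upstream lodash): `MAX_MEMOIZE_SIZE` is `500`
+ * in this build, and the cache is cleared wholesale once it reaches that
+ * size, so callers such as `stringToPath` trade occasional recomputation
+ * for bounded memory.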
+ */ + function memoizeCapped(func) { + var result = memoize(func, function(key) { + if (cache.size === MAX_MEMOIZE_SIZE) { + cache.clear(); + } + return key; + }); + + var cache = result.cache; + return result; + } + + /** + * Merges the function metadata of `source` into `data`. + * + * Merging metadata reduces the number of wrappers used to invoke a function. + * This is possible because methods like `_.bind`, `_.curry`, and `_.partial` + * may be applied regardless of execution order. Methods like `_.ary` and + * `_.rearg` modify function arguments, making the order in which they are + * executed important, preventing the merging of metadata. However, we make + * an exception for a safe combined case where curried functions have `_.ary` + * and or `_.rearg` applied. + * + * @private + * @param {Array} data The destination metadata. + * @param {Array} source The source metadata. + * @returns {Array} Returns `data`. + */ + function mergeData(data, source) { + var bitmask = data[1], + srcBitmask = source[1], + newBitmask = bitmask | srcBitmask, + isCommon = newBitmask < (WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG | WRAP_ARY_FLAG); + + var isCombo = + ((srcBitmask == WRAP_ARY_FLAG) && (bitmask == WRAP_CURRY_FLAG)) || + ((srcBitmask == WRAP_ARY_FLAG) && (bitmask == WRAP_REARG_FLAG) && (data[7].length <= source[8])) || + ((srcBitmask == (WRAP_ARY_FLAG | WRAP_REARG_FLAG)) && (source[7].length <= source[8]) && (bitmask == WRAP_CURRY_FLAG)); + + // Exit early if metadata can't be merged. + if (!(isCommon || isCombo)) { + return data; + } + // Use source `thisArg` if available. + if (srcBitmask & WRAP_BIND_FLAG) { + data[2] = source[2]; + // Set when currying a bound function. + newBitmask |= bitmask & WRAP_BIND_FLAG ? 0 : WRAP_CURRY_BOUND_FLAG; + } + // Compose partial arguments. + var value = source[3]; + if (value) { + var partials = data[3]; + data[3] = partials ? composeArgs(partials, value, source[4]) : value; + data[4] = partials ? replaceHolders(data[3], PLACEHOLDER) : source[4]; + } + // Compose partial right arguments. + value = source[5]; + if (value) { + partials = data[5]; + data[5] = partials ? composeArgsRight(partials, value, source[6]) : value; + data[6] = partials ? replaceHolders(data[5], PLACEHOLDER) : source[6]; + } + // Use source `argPos` if available. + value = source[7]; + if (value) { + data[7] = value; + } + // Use source `ary` if it's smaller. + if (srcBitmask & WRAP_ARY_FLAG) { + data[8] = data[8] == null ? source[8] : nativeMin(data[8], source[8]); + } + // Use source `arity` if one is not provided. + if (data[9] == null) { + data[9] = source[9]; + } + // Use source `func` and merge bitmasks. + data[0] = source[0]; + data[1] = newBitmask; + + return data; + } + + /** + * This function is like + * [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * except that it includes inherited enumerable properties. + * + * @private + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + */ + function nativeKeysIn(object) { + var result = []; + if (object != null) { + for (var key in Object(object)) { + result.push(key); + } + } + return result; + } + + /** + * Converts `value` to a string using `Object.prototype.toString`. + * + * @private + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + */ + function objectToString(value) { + return nativeObjectToString.call(value); + } + + /** + * A specialized version of `baseRest` which transforms the rest array. 
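+ *
+ * Editorial note (not part of upstream lodash): as a sketch,
+ * `overRest(f, 1, transform)` produces a function that invokes
+ * `f(args[0], transform([args[1], args[2], ...]))`, collecting everything
+ * from `start` onward into one array before applying `transform`.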
+ * + * @private + * @param {Function} func The function to apply a rest parameter to. + * @param {number} [start=func.length-1] The start position of the rest parameter. + * @param {Function} transform The rest array transform. + * @returns {Function} Returns the new function. + */ + function overRest(func, start, transform) { + start = nativeMax(start === undefined ? (func.length - 1) : start, 0); + return function() { + var args = arguments, + index = -1, + length = nativeMax(args.length - start, 0), + array = Array(length); + + while (++index < length) { + array[index] = args[start + index]; + } + index = -1; + var otherArgs = Array(start + 1); + while (++index < start) { + otherArgs[index] = args[index]; + } + otherArgs[start] = transform(array); + return apply(func, this, otherArgs); + }; + } + + /** + * Gets the parent value at `path` of `object`. + * + * @private + * @param {Object} object The object to query. + * @param {Array} path The path to get the parent value of. + * @returns {*} Returns the parent value. + */ + function parent(object, path) { + return path.length < 2 ? object : baseGet(object, baseSlice(path, 0, -1)); + } + + /** + * Reorder `array` according to the specified indexes where the element at + * the first index is assigned as the first element, the element at + * the second index is assigned as the second element, and so on. + * + * @private + * @param {Array} array The array to reorder. + * @param {Array} indexes The arranged array indexes. + * @returns {Array} Returns `array`. + */ + function reorder(array, indexes) { + var arrLength = array.length, + length = nativeMin(indexes.length, arrLength), + oldArray = copyArray(array); + + while (length--) { + var index = indexes[length]; + array[length] = isIndex(index, arrLength) ? oldArray[index] : undefined; + } + return array; + } + + /** + * Gets the value at `key`, unless `key` is "__proto__" or "constructor". + * + * @private + * @param {Object} object The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ + function safeGet(object, key) { + if (key === 'constructor' && typeof object[key] === 'function') { + return; + } + + if (key == '__proto__') { + return; + } + + return object[key]; + } + + /** + * Sets metadata for `func`. + * + * **Note:** If this function becomes hot, i.e. is invoked a lot in a short + * period of time, it will trip its breaker and transition to an identity + * function to avoid garbage collection pauses in V8. See + * [V8 issue 2070](https://bugs.chromium.org/p/v8/issues/detail?id=2070) + * for more details. + * + * @private + * @param {Function} func The function to associate metadata with. + * @param {*} data The metadata. + * @returns {Function} Returns `func`. + */ + var setData = shortOut(baseSetData); + + /** + * A simple wrapper around the global [`setTimeout`](https://mdn.io/setTimeout). + * + * @private + * @param {Function} func The function to delay. + * @param {number} wait The number of milliseconds to delay invocation. + * @returns {number|Object} Returns the timer id or timeout object. + */ + var setTimeout = ctxSetTimeout || function(func, wait) { + return root.setTimeout(func, wait); + }; + + /** + * Sets the `toString` method of `func` to return `string`. + * + * @private + * @param {Function} func The function to modify. + * @param {Function} string The `toString` result. + * @returns {Function} Returns `func`. 
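+ *
+ * Editorial note (not part of upstream lodash): wrapping `baseSetToString`
+ * in `shortOut` means that under a burst of calls (`HOT_COUNT` within
+ * `HOT_SPAN` milliseconds) this returns `func` untouched instead of
+ * repeatedly invoking `Object.defineProperty`; see `shortOut` further below.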
+ */
+ var setToString = shortOut(baseSetToString);
+
+ /**
+ * Sets the `toString` method of `wrapper` to mimic the source of `reference`
+ * with wrapper details in a comment at the top of the source body.
+ *
+ * @private
+ * @param {Function} wrapper The function to modify.
+ * @param {Function} reference The reference function.
+ * @param {number} bitmask The bitmask flags. See `createWrap` for more details.
+ * @returns {Function} Returns `wrapper`.
+ */
+ function setWrapToString(wrapper, reference, bitmask) {
+ var source = (reference + '');
+ return setToString(wrapper, insertWrapDetails(source, updateWrapDetails(getWrapDetails(source), bitmask)));
+ }
+
+ /**
+ * Creates a function that'll short out and invoke `identity` instead
+ * of `func` when it's called `HOT_COUNT` or more times in `HOT_SPAN`
+ * milliseconds.
+ *
+ * @private
+ * @param {Function} func The function to restrict.
+ * @returns {Function} Returns the new shortable function.
+ */
+ function shortOut(func) {
+ var count = 0,
+ lastCalled = 0;
+
+ return function() {
+ var stamp = nativeNow(),
+ remaining = HOT_SPAN - (stamp - lastCalled);
+
+ lastCalled = stamp;
+ if (remaining > 0) {
+ if (++count >= HOT_COUNT) {
+ return arguments[0];
+ }
+ } else {
+ count = 0;
+ }
+ return func.apply(undefined, arguments);
+ };
+ }
+
+ /**
+ * A specialized version of `_.shuffle` which mutates and sets the size of `array`.
+ *
+ * @private
+ * @param {Array} array The array to shuffle.
+ * @param {number} [size=array.length] The size of `array`.
+ * @returns {Array} Returns `array`.
+ */
+ function shuffleSelf(array, size) {
+ var index = -1,
+ length = array.length,
+ lastIndex = length - 1;
+
+ size = size === undefined ? length : size;
+ while (++index < size) {
+ var rand = baseRandom(index, lastIndex),
+ value = array[rand];
+
+ array[rand] = array[index];
+ array[index] = value;
+ }
+ array.length = size;
+ return array;
+ }
+
+ /**
+ * Converts `string` to a property path array.
+ *
+ * @private
+ * @param {string} string The string to convert.
+ * @returns {Array} Returns the property path array.
+ */
+ var stringToPath = memoizeCapped(function(string) {
+ var result = [];
+ if (string.charCodeAt(0) === 46 /* . */) {
+ result.push('');
+ }
+ string.replace(rePropName, function(match, number, quote, subString) {
+ result.push(quote ? subString.replace(reEscapeChar, '$1') : (number || match));
+ });
+ return result;
+ });
+
+ /**
+ * Converts `value` to a string key if it's not a string or symbol.
+ *
+ * @private
+ * @param {*} value The value to inspect.
+ * @returns {string|symbol} Returns the key.
+ */
+ function toKey(value) {
+ if (typeof value == 'string' || isSymbol(value)) {
+ return value;
+ }
+ var result = (value + '');
+ return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
+ }
+
+ /**
+ * Converts `func` to its source code.
+ *
+ * @private
+ * @param {Function} func The function to convert.
+ * @returns {string} Returns the source code.
+ */
+ function toSource(func) {
+ if (func != null) {
+ try {
+ return funcToString.call(func);
+ } catch (e) {}
+ try {
+ return (func + '');
+ } catch (e) {}
+ }
+ return '';
+ }
+
+ /**
+ * Updates wrapper `details` based on `bitmask` flags.
+ *
+ * @private
+ * @param {Array} details The details to modify.
+ * @param {number} bitmask The bitmask flags. See `createWrap` for more details.
+ * @returns {Array} Returns `details`.
+ */
+ function updateWrapDetails(details, bitmask) {
+ arrayEach(wrapFlags, function(pair) {
+ var value = '_.'
+ pair[0]; + if ((bitmask & pair[1]) && !arrayIncludes(details, value)) { + details.push(value); + } + }); + return details.sort(); + } + + /** + * Creates a clone of `wrapper`. + * + * @private + * @param {Object} wrapper The wrapper to clone. + * @returns {Object} Returns the cloned wrapper. + */ + function wrapperClone(wrapper) { + if (wrapper instanceof LazyWrapper) { + return wrapper.clone(); + } + var result = new LodashWrapper(wrapper.__wrapped__, wrapper.__chain__); + result.__actions__ = copyArray(wrapper.__actions__); + result.__index__ = wrapper.__index__; + result.__values__ = wrapper.__values__; + return result; + } + + /*------------------------------------------------------------------------*/ + + /** + * Creates an array of elements split into groups the length of `size`. + * If `array` can't be split evenly, the final chunk will be the remaining + * elements. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to process. + * @param {number} [size=1] The length of each chunk + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the new array of chunks. + * @example + * + * _.chunk(['a', 'b', 'c', 'd'], 2); + * // => [['a', 'b'], ['c', 'd']] + * + * _.chunk(['a', 'b', 'c', 'd'], 3); + * // => [['a', 'b', 'c'], ['d']] + */ + function chunk(array, size, guard) { + if ((guard ? isIterateeCall(array, size, guard) : size === undefined)) { + size = 1; + } else { + size = nativeMax(toInteger(size), 0); + } + var length = array == null ? 0 : array.length; + if (!length || size < 1) { + return []; + } + var index = 0, + resIndex = 0, + result = Array(nativeCeil(length / size)); + + while (index < length) { + result[resIndex++] = baseSlice(array, index, (index += size)); + } + return result; + } + + /** + * Creates an array with all falsey values removed. The values `false`, `null`, + * `0`, `""`, `undefined`, and `NaN` are falsey. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to compact. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.compact([0, 1, false, 2, '', 3]); + * // => [1, 2, 3] + */ + function compact(array) { + var index = -1, + length = array == null ? 0 : array.length, + resIndex = 0, + result = []; + + while (++index < length) { + var value = array[index]; + if (value) { + result[resIndex++] = value; + } + } + return result; + } + + /** + * Creates a new array concatenating `array` with any additional arrays + * and/or values. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to concatenate. + * @param {...*} [values] The values to concatenate. + * @returns {Array} Returns the new concatenated array. + * @example + * + * var array = [1]; + * var other = _.concat(array, 2, [3], [[4]]); + * + * console.log(other); + * // => [1, 2, 3, [4]] + * + * console.log(array); + * // => [1] + */ + function concat() { + var length = arguments.length; + if (!length) { + return []; + } + var args = Array(length - 1), + array = arguments[0], + index = length; + + while (index--) { + args[index - 1] = arguments[index]; + } + return arrayPush(isArray(array) ? copyArray(array) : [array], baseFlatten(args, 1)); + } + + /** + * Creates an array of `array` values not included in the other given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. 
The order and references of result values are + * determined by the first array. + * + * **Note:** Unlike `_.pullAll`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @returns {Array} Returns the new array of filtered values. + * @see _.without, _.xor + * @example + * + * _.difference([2, 1], [2, 3]); + * // => [1] + */ + var difference = baseRest(function(array, values) { + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true)) + : []; + }); + + /** + * This method is like `_.difference` except that it accepts `iteratee` which + * is invoked for each element of `array` and `values` to generate the criterion + * by which they're compared. The order and references of result values are + * determined by the first array. The iteratee is invoked with one argument: + * (value). + * + * **Note:** Unlike `_.pullAllBy`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * _.differenceBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [1.2] + * + * // The `_.property` iteratee shorthand. + * _.differenceBy([{ 'x': 2 }, { 'x': 1 }], [{ 'x': 1 }], 'x'); + * // => [{ 'x': 2 }] + */ + var differenceBy = baseRest(function(array, values) { + var iteratee = last(values); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true), getIteratee(iteratee, 2)) + : []; + }); + + /** + * This method is like `_.difference` except that it accepts `comparator` + * which is invoked to compare elements of `array` to `values`. The order and + * references of result values are determined by the first array. The comparator + * is invoked with two arguments: (arrVal, othVal). + * + * **Note:** Unlike `_.pullAllWith`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {...Array} [values] The values to exclude. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of filtered values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * + * _.differenceWith(objects, [{ 'x': 1, 'y': 2 }], _.isEqual); + * // => [{ 'x': 2, 'y': 1 }] + */ + var differenceWith = baseRest(function(array, values) { + var comparator = last(values); + if (isArrayLikeObject(comparator)) { + comparator = undefined; + } + return isArrayLikeObject(array) + ? baseDifference(array, baseFlatten(values, 1, isArrayLikeObject, true), undefined, comparator) + : []; + }); + + /** + * Creates a slice of `array` with `n` elements dropped from the beginning. + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to drop. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. 
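+ *
+ * Editorial note (not part of upstream lodash): the `guard` parameter is
+ * what lets `_.map([[1, 2], [3, 4]], _.drop)` work; the iteratee call
+ * passes (value, index, collection), the truthy guard forces `n` to `1`,
+ * and the result is `[[2], [4]]`.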
+ * @example + * + * _.drop([1, 2, 3]); + * // => [2, 3] + * + * _.drop([1, 2, 3], 2); + * // => [3] + * + * _.drop([1, 2, 3], 5); + * // => [] + * + * _.drop([1, 2, 3], 0); + * // => [1, 2, 3] + */ + function drop(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + return baseSlice(array, n < 0 ? 0 : n, length); + } + + /** + * Creates a slice of `array` with `n` elements dropped from the end. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to drop. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.dropRight([1, 2, 3]); + * // => [1, 2] + * + * _.dropRight([1, 2, 3], 2); + * // => [1] + * + * _.dropRight([1, 2, 3], 5); + * // => [] + * + * _.dropRight([1, 2, 3], 0); + * // => [1, 2, 3] + */ + function dropRight(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + n = length - n; + return baseSlice(array, 0, n < 0 ? 0 : n); + } + + /** + * Creates a slice of `array` excluding elements dropped from the end. + * Elements are dropped until `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.dropRightWhile(users, function(o) { return !o.active; }); + * // => objects for ['barney'] + * + * // The `_.matches` iteratee shorthand. + * _.dropRightWhile(users, { 'user': 'pebbles', 'active': false }); + * // => objects for ['barney', 'fred'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.dropRightWhile(users, ['active', false]); + * // => objects for ['barney'] + * + * // The `_.property` iteratee shorthand. + * _.dropRightWhile(users, 'active'); + * // => objects for ['barney', 'fred', 'pebbles'] + */ + function dropRightWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3), true, true) + : []; + } + + /** + * Creates a slice of `array` excluding elements dropped from the beginning. + * Elements are dropped until `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.dropWhile(users, function(o) { return !o.active; }); + * // => objects for ['pebbles'] + * + * // The `_.matches` iteratee shorthand. + * _.dropWhile(users, { 'user': 'barney', 'active': false }); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.matchesProperty` iteratee shorthand. 
+ * _.dropWhile(users, ['active', false]); + * // => objects for ['pebbles'] + * + * // The `_.property` iteratee shorthand. + * _.dropWhile(users, 'active'); + * // => objects for ['barney', 'fred', 'pebbles'] + */ + function dropWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3), true) + : []; + } + + /** + * Fills elements of `array` with `value` from `start` up to, but not + * including, `end`. + * + * **Note:** This method mutates `array`. + * + * @static + * @memberOf _ + * @since 3.2.0 + * @category Array + * @param {Array} array The array to fill. + * @param {*} value The value to fill `array` with. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns `array`. + * @example + * + * var array = [1, 2, 3]; + * + * _.fill(array, 'a'); + * console.log(array); + * // => ['a', 'a', 'a'] + * + * _.fill(Array(3), 2); + * // => [2, 2, 2] + * + * _.fill([4, 6, 8, 10], '*', 1, 3); + * // => [4, '*', '*', 10] + */ + function fill(array, value, start, end) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + if (start && typeof start != 'number' && isIterateeCall(array, value, start)) { + start = 0; + end = length; + } + return baseFill(array, value, start, end); + } + + /** + * This method is like `_.find` except that it returns the index of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.findIndex(users, function(o) { return o.user == 'barney'; }); + * // => 0 + * + * // The `_.matches` iteratee shorthand. + * _.findIndex(users, { 'user': 'fred', 'active': false }); + * // => 1 + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findIndex(users, ['active', false]); + * // => 0 + * + * // The `_.property` iteratee shorthand. + * _.findIndex(users, 'active'); + * // => 2 + */ + function findIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseFindIndex(array, getIteratee(predicate, 3), index); + } + + /** + * This method is like `_.findIndex` except that it iterates over elements + * of `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the found element, else `-1`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.findLastIndex(users, function(o) { return o.user == 'pebbles'; }); + * // => 2 + * + * // The `_.matches` iteratee shorthand. 
+ * _.findLastIndex(users, { 'user': 'barney', 'active': true }); + * // => 0 + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findLastIndex(users, ['active', false]); + * // => 2 + * + * // The `_.property` iteratee shorthand. + * _.findLastIndex(users, 'active'); + * // => 0 + */ + function findLastIndex(array, predicate, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = length - 1; + if (fromIndex !== undefined) { + index = toInteger(fromIndex); + index = fromIndex < 0 + ? nativeMax(length + index, 0) + : nativeMin(index, length - 1); + } + return baseFindIndex(array, getIteratee(predicate, 3), index, true); + } + + /** + * Flattens `array` a single level deep. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flatten([1, [2, [3, [4]], 5]]); + * // => [1, 2, [3, [4]], 5] + */ + function flatten(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, 1) : []; + } + + /** + * Recursively flattens `array`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to flatten. + * @returns {Array} Returns the new flattened array. + * @example + * + * _.flattenDeep([1, [2, [3, [4]], 5]]); + * // => [1, 2, 3, 4, 5] + */ + function flattenDeep(array) { + var length = array == null ? 0 : array.length; + return length ? baseFlatten(array, INFINITY) : []; + } + + /** + * Recursively flatten `array` up to `depth` times. + * + * @static + * @memberOf _ + * @since 4.4.0 + * @category Array + * @param {Array} array The array to flatten. + * @param {number} [depth=1] The maximum recursion depth. + * @returns {Array} Returns the new flattened array. + * @example + * + * var array = [1, [2, [3, [4]], 5]]; + * + * _.flattenDepth(array, 1); + * // => [1, 2, [3, [4]], 5] + * + * _.flattenDepth(array, 2); + * // => [1, 2, 3, [4], 5] + */ + function flattenDepth(array, depth) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + depth = depth === undefined ? 1 : toInteger(depth); + return baseFlatten(array, depth); + } + + /** + * The inverse of `_.toPairs`; this method returns an object composed + * from key-value `pairs`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} pairs The key-value pairs. + * @returns {Object} Returns the new object. + * @example + * + * _.fromPairs([['a', 1], ['b', 2]]); + * // => { 'a': 1, 'b': 2 } + */ + function fromPairs(pairs) { + var index = -1, + length = pairs == null ? 0 : pairs.length, + result = {}; + + while (++index < length) { + var pair = pairs[index]; + result[pair[0]] = pair[1]; + } + return result; + } + + /** + * Gets the first element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias first + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the first element of `array`. + * @example + * + * _.head([1, 2, 3]); + * // => 1 + * + * _.head([]); + * // => undefined + */ + function head(array) { + return (array && array.length) ? array[0] : undefined; + } + + /** + * Gets the index at which the first occurrence of `value` is found in `array` + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. If `fromIndex` is negative, it's used as the + * offset from the end of `array`. 
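+ *
+ * Editorial note (not part of upstream lodash): the negative offset is
+ * clamped at `0`, e.g. `_.indexOf([1, 2, 1, 2], 2, -2)` starts scanning at
+ * index `2` and returns `3`.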
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.indexOf([1, 2, 1, 2], 2); + * // => 1 + * + * // Search from the `fromIndex`. + * _.indexOf([1, 2, 1, 2], 2, 2); + * // => 3 + */ + function indexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = fromIndex == null ? 0 : toInteger(fromIndex); + if (index < 0) { + index = nativeMax(length + index, 0); + } + return baseIndexOf(array, value, index); + } + + /** + * Gets all but the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.initial([1, 2, 3]); + * // => [1, 2] + */ + function initial(array) { + var length = array == null ? 0 : array.length; + return length ? baseSlice(array, 0, -1) : []; + } + + /** + * Creates an array of unique values that are included in all given arrays + * using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. The order and references of result values are + * determined by the first array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * _.intersection([2, 1], [2, 3]); + * // => [2] + */ + var intersection = baseRest(function(arrays) { + var mapped = arrayMap(arrays, castArrayLikeObject); + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped) + : []; + }); + + /** + * This method is like `_.intersection` except that it accepts `iteratee` + * which is invoked for each element of each `arrays` to generate the criterion + * by which they're compared. The order and references of result values are + * determined by the first array. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * _.intersectionBy([2.1, 1.2], [2.3, 3.4], Math.floor); + * // => [2.1] + * + * // The `_.property` iteratee shorthand. + * _.intersectionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }] + */ + var intersectionBy = baseRest(function(arrays) { + var iteratee = last(arrays), + mapped = arrayMap(arrays, castArrayLikeObject); + + if (iteratee === last(mapped)) { + iteratee = undefined; + } else { + mapped.pop(); + } + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped, getIteratee(iteratee, 2)) + : []; + }); + + /** + * This method is like `_.intersection` except that it accepts `comparator` + * which is invoked to compare elements of `arrays`. The order and references + * of result values are determined by the first array. The comparator is + * invoked with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. 
+ * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of intersecting values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.intersectionWith(objects, others, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }] + */ + var intersectionWith = baseRest(function(arrays) { + var comparator = last(arrays), + mapped = arrayMap(arrays, castArrayLikeObject); + + comparator = typeof comparator == 'function' ? comparator : undefined; + if (comparator) { + mapped.pop(); + } + return (mapped.length && mapped[0] === arrays[0]) + ? baseIntersection(mapped, undefined, comparator) + : []; + }); + + /** + * Converts all elements in `array` into a string separated by `separator`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to convert. + * @param {string} [separator=','] The element separator. + * @returns {string} Returns the joined string. + * @example + * + * _.join(['a', 'b', 'c'], '~'); + * // => 'a~b~c' + */ + function join(array, separator) { + return array == null ? '' : nativeJoin.call(array, separator); + } + + /** + * Gets the last element of `array`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @returns {*} Returns the last element of `array`. + * @example + * + * _.last([1, 2, 3]); + * // => 3 + */ + function last(array) { + var length = array == null ? 0 : array.length; + return length ? array[length - 1] : undefined; + } + + /** + * This method is like `_.indexOf` except that it iterates over elements of + * `array` from right to left. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=array.length-1] The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.lastIndexOf([1, 2, 1, 2], 2); + * // => 3 + * + * // Search from the `fromIndex`. + * _.lastIndexOf([1, 2, 1, 2], 2, 2); + * // => 1 + */ + function lastIndexOf(array, value, fromIndex) { + var length = array == null ? 0 : array.length; + if (!length) { + return -1; + } + var index = length; + if (fromIndex !== undefined) { + index = toInteger(fromIndex); + index = index < 0 ? nativeMax(length + index, 0) : nativeMin(index, length - 1); + } + return value === value + ? strictLastIndexOf(array, value, index) + : baseFindIndex(array, baseIsNaN, index, true); + } + + /** + * Gets the element at index `n` of `array`. If `n` is negative, the nth + * element from the end is returned. + * + * @static + * @memberOf _ + * @since 4.11.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=0] The index of the element to return. + * @returns {*} Returns the nth element of `array`. + * @example + * + * var array = ['a', 'b', 'c', 'd']; + * + * _.nth(array, 1); + * // => 'b' + * + * _.nth(array, -2); + * // => 'c'; + */ + function nth(array, n) { + return (array && array.length) ? baseNth(array, toInteger(n)) : undefined; + } + + /** + * Removes all given values from `array` using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * **Note:** Unlike `_.without`, this method mutates `array`. Use `_.remove` + * to remove elements from an array by predicate. 
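+ *
+ * Editorial note (not part of upstream lodash): because comparisons use
+ * `SameValueZero`, `NaN` can be pulled even though `NaN !== NaN`:
+ *
+ *   var a = [1, NaN, 2];
+ *   _.pull(a, NaN); // a => [1, 2]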
+ * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {...*} [values] The values to remove. + * @returns {Array} Returns `array`. + * @example + * + * var array = ['a', 'b', 'c', 'a', 'b', 'c']; + * + * _.pull(array, 'a', 'c'); + * console.log(array); + * // => ['b', 'b'] + */ + var pull = baseRest(pullAll); + + /** + * This method is like `_.pull` except that it accepts an array of values to remove. + * + * **Note:** Unlike `_.difference`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @returns {Array} Returns `array`. + * @example + * + * var array = ['a', 'b', 'c', 'a', 'b', 'c']; + * + * _.pullAll(array, ['a', 'c']); + * console.log(array); + * // => ['b', 'b'] + */ + function pullAll(array, values) { + return (array && array.length && values && values.length) + ? basePullAll(array, values) + : array; + } + + /** + * This method is like `_.pullAll` except that it accepts `iteratee` which is + * invoked for each element of `array` and `values` to generate the criterion + * by which they're compared. The iteratee is invoked with one argument: (value). + * + * **Note:** Unlike `_.differenceBy`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns `array`. + * @example + * + * var array = [{ 'x': 1 }, { 'x': 2 }, { 'x': 3 }, { 'x': 1 }]; + * + * _.pullAllBy(array, [{ 'x': 1 }, { 'x': 3 }], 'x'); + * console.log(array); + * // => [{ 'x': 2 }] + */ + function pullAllBy(array, values, iteratee) { + return (array && array.length && values && values.length) + ? basePullAll(array, values, getIteratee(iteratee, 2)) + : array; + } + + /** + * This method is like `_.pullAll` except that it accepts `comparator` which + * is invoked to compare elements of `array` to `values`. The comparator is + * invoked with two arguments: (arrVal, othVal). + * + * **Note:** Unlike `_.differenceWith`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 4.6.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Array} values The values to remove. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns `array`. + * @example + * + * var array = [{ 'x': 1, 'y': 2 }, { 'x': 3, 'y': 4 }, { 'x': 5, 'y': 6 }]; + * + * _.pullAllWith(array, [{ 'x': 3, 'y': 4 }], _.isEqual); + * console.log(array); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 5, 'y': 6 }] + */ + function pullAllWith(array, values, comparator) { + return (array && array.length && values && values.length) + ? basePullAll(array, values, undefined, comparator) + : array; + } + + /** + * Removes elements from `array` corresponding to `indexes` and returns an + * array of removed elements. + * + * **Note:** Unlike `_.at`, this method mutates `array`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {...(number|number[])} [indexes] The indexes of elements to remove. + * @returns {Array} Returns the new array of removed elements. 
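+ *
+ * Editorial note (not part of upstream lodash): indexes refer to positions
+ * in the original array; removal happens from the highest index down, so
+ * earlier removals never shift the targets of later ones.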
+ * @example + * + * var array = ['a', 'b', 'c', 'd']; + * var pulled = _.pullAt(array, [1, 3]); + * + * console.log(array); + * // => ['a', 'c'] + * + * console.log(pulled); + * // => ['b', 'd'] + */ + var pullAt = flatRest(function(array, indexes) { + var length = array == null ? 0 : array.length, + result = baseAt(array, indexes); + + basePullAt(array, arrayMap(indexes, function(index) { + return isIndex(index, length) ? +index : index; + }).sort(compareAscending)); + + return result; + }); + + /** + * Removes all elements from `array` that `predicate` returns truthy for + * and returns an array of the removed elements. The predicate is invoked + * with three arguments: (value, index, array). + * + * **Note:** Unlike `_.filter`, this method mutates `array`. Use `_.pull` + * to pull elements from an array by value. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Array + * @param {Array} array The array to modify. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new array of removed elements. + * @example + * + * var array = [1, 2, 3, 4]; + * var evens = _.remove(array, function(n) { + * return n % 2 == 0; + * }); + * + * console.log(array); + * // => [1, 3] + * + * console.log(evens); + * // => [2, 4] + */ + function remove(array, predicate) { + var result = []; + if (!(array && array.length)) { + return result; + } + var index = -1, + indexes = [], + length = array.length; + + predicate = getIteratee(predicate, 3); + while (++index < length) { + var value = array[index]; + if (predicate(value, index, array)) { + result.push(value); + indexes.push(index); + } + } + basePullAt(array, indexes); + return result; + } + + /** + * Reverses `array` so that the first element becomes the last, the second + * element becomes the second to last, and so on. + * + * **Note:** This method mutates `array` and is based on + * [`Array#reverse`](https://mdn.io/Array/reverse). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to modify. + * @returns {Array} Returns `array`. + * @example + * + * var array = [1, 2, 3]; + * + * _.reverse(array); + * // => [3, 2, 1] + * + * console.log(array); + * // => [3, 2, 1] + */ + function reverse(array) { + return array == null ? array : nativeReverse.call(array); + } + + /** + * Creates a slice of `array` from `start` up to, but not including, `end`. + * + * **Note:** This method is used instead of + * [`Array#slice`](https://mdn.io/Array/slice) to ensure dense arrays are + * returned. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to slice. + * @param {number} [start=0] The start position. + * @param {number} [end=array.length] The end position. + * @returns {Array} Returns the slice of `array`. + */ + function slice(array, start, end) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + if (end && typeof end != 'number' && isIterateeCall(array, start, end)) { + start = 0; + end = length; + } + else { + start = start == null ? 0 : toInteger(start); + end = end === undefined ? length : toInteger(end); + } + return baseSlice(array, start, end); + } + + /** + * Uses a binary search to determine the lowest index at which `value` + * should be inserted into `array` in order to maintain its sort order. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The sorted array to inspect. 
+ * @param {*} value The value to evaluate. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * _.sortedIndex([30, 50], 40); + * // => 1 + */ + function sortedIndex(array, value) { + return baseSortedIndex(array, value); + } + + /** + * This method is like `_.sortedIndex` except that it accepts `iteratee` + * which is invoked for `value` and each element of `array` to compute their + * sort ranking. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * var objects = [{ 'x': 4 }, { 'x': 5 }]; + * + * _.sortedIndexBy(objects, { 'x': 4 }, function(o) { return o.x; }); + * // => 0 + * + * // The `_.property` iteratee shorthand. + * _.sortedIndexBy(objects, { 'x': 4 }, 'x'); + * // => 0 + */ + function sortedIndexBy(array, value, iteratee) { + return baseSortedIndexBy(array, value, getIteratee(iteratee, 2)); + } + + /** + * This method is like `_.indexOf` except that it performs a binary + * search on a sorted `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.sortedIndexOf([4, 5, 5, 5, 6], 5); + * // => 1 + */ + function sortedIndexOf(array, value) { + var length = array == null ? 0 : array.length; + if (length) { + var index = baseSortedIndex(array, value); + if (index < length && eq(array[index], value)) { + return index; + } + } + return -1; + } + + /** + * This method is like `_.sortedIndex` except that it returns the highest + * index at which `value` should be inserted into `array` in order to + * maintain its sort order. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * _.sortedLastIndex([4, 5, 5, 5, 6], 5); + * // => 4 + */ + function sortedLastIndex(array, value) { + return baseSortedIndex(array, value, true); + } + + /** + * This method is like `_.sortedLastIndex` except that it accepts `iteratee` + * which is invoked for `value` and each element of `array` to compute their + * sort ranking. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The sorted array to inspect. + * @param {*} value The value to evaluate. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {number} Returns the index at which `value` should be inserted + * into `array`. + * @example + * + * var objects = [{ 'x': 4 }, { 'x': 5 }]; + * + * _.sortedLastIndexBy(objects, { 'x': 4 }, function(o) { return o.x; }); + * // => 1 + * + * // The `_.property` iteratee shorthand. 
+ * _.sortedLastIndexBy(objects, { 'x': 4 }, 'x'); + * // => 1 + */ + function sortedLastIndexBy(array, value, iteratee) { + return baseSortedIndexBy(array, value, getIteratee(iteratee, 2), true); + } + + /** + * This method is like `_.lastIndexOf` except that it performs a binary + * search on a sorted `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @returns {number} Returns the index of the matched value, else `-1`. + * @example + * + * _.sortedLastIndexOf([4, 5, 5, 5, 6], 5); + * // => 3 + */ + function sortedLastIndexOf(array, value) { + var length = array == null ? 0 : array.length; + if (length) { + var index = baseSortedIndex(array, value, true) - 1; + if (eq(array[index], value)) { + return index; + } + } + return -1; + } + + /** + * This method is like `_.uniq` except that it's designed and optimized + * for sorted arrays. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.sortedUniq([1, 1, 2]); + * // => [1, 2] + */ + function sortedUniq(array) { + return (array && array.length) + ? baseSortedUniq(array) + : []; + } + + /** + * This method is like `_.uniqBy` except that it's designed and optimized + * for sorted arrays. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.sortedUniqBy([1.1, 1.2, 2.3, 2.4], Math.floor); + * // => [1.1, 2.3] + */ + function sortedUniqBy(array, iteratee) { + return (array && array.length) + ? baseSortedUniq(array, getIteratee(iteratee, 2)) + : []; + } + + /** + * Gets all but the first element of `array`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to query. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.tail([1, 2, 3]); + * // => [2, 3] + */ + function tail(array) { + var length = array == null ? 0 : array.length; + return length ? baseSlice(array, 1, length) : []; + } + + /** + * Creates a slice of `array` with `n` elements taken from the beginning. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to take. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. + * @example + * + * _.take([1, 2, 3]); + * // => [1] + * + * _.take([1, 2, 3], 2); + * // => [1, 2] + * + * _.take([1, 2, 3], 5); + * // => [1, 2, 3] + * + * _.take([1, 2, 3], 0); + * // => [] + */ + function take(array, n, guard) { + if (!(array && array.length)) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + return baseSlice(array, 0, n < 0 ? 0 : n); + } + + /** + * Creates a slice of `array` with `n` elements taken from the end. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {number} [n=1] The number of elements to take. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the slice of `array`. 
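+ *
+ * Editorial note (not part of upstream lodash): an `n` larger than the
+ * array is safe; `length - n` goes negative and is clamped to `0`, so the
+ * whole array is copied rather than an error thrown.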
+ * @example + * + * _.takeRight([1, 2, 3]); + * // => [3] + * + * _.takeRight([1, 2, 3], 2); + * // => [2, 3] + * + * _.takeRight([1, 2, 3], 5); + * // => [1, 2, 3] + * + * _.takeRight([1, 2, 3], 0); + * // => [] + */ + function takeRight(array, n, guard) { + var length = array == null ? 0 : array.length; + if (!length) { + return []; + } + n = (guard || n === undefined) ? 1 : toInteger(n); + n = length - n; + return baseSlice(array, n < 0 ? 0 : n, length); + } + + /** + * Creates a slice of `array` with elements taken from the end. Elements are + * taken until `predicate` returns falsey. The predicate is invoked with + * three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': false } + * ]; + * + * _.takeRightWhile(users, function(o) { return !o.active; }); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.matches` iteratee shorthand. + * _.takeRightWhile(users, { 'user': 'pebbles', 'active': false }); + * // => objects for ['pebbles'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.takeRightWhile(users, ['active', false]); + * // => objects for ['fred', 'pebbles'] + * + * // The `_.property` iteratee shorthand. + * _.takeRightWhile(users, 'active'); + * // => [] + */ + function takeRightWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3), false, true) + : []; + } + + /** + * Creates a slice of `array` with elements taken from the beginning. Elements + * are taken until `predicate` returns falsey. The predicate is invoked with + * three arguments: (value, index, array). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Array + * @param {Array} array The array to query. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the slice of `array`. + * @example + * + * var users = [ + * { 'user': 'barney', 'active': false }, + * { 'user': 'fred', 'active': false }, + * { 'user': 'pebbles', 'active': true } + * ]; + * + * _.takeWhile(users, function(o) { return !o.active; }); + * // => objects for ['barney', 'fred'] + * + * // The `_.matches` iteratee shorthand. + * _.takeWhile(users, { 'user': 'barney', 'active': false }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.takeWhile(users, ['active', false]); + * // => objects for ['barney', 'fred'] + * + * // The `_.property` iteratee shorthand. + * _.takeWhile(users, 'active'); + * // => [] + */ + function takeWhile(array, predicate) { + return (array && array.length) + ? baseWhile(array, getIteratee(predicate, 3)) + : []; + } + + /** + * Creates an array of unique values, in order, from all given arrays using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @returns {Array} Returns the new array of combined values. 
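+ *
+ * (Editor's note, illustrative: `union` behaves like
+ * `_.uniq(_.flatten(arrays))` under `SameValueZero`, keeping the first
+ * occurrence of each value. A sketch, assuming plain number arrays:)
+ *
+ * _.union([2], [1, 2], [2, 3]);
+ * // => [2, 1, 3]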
+ * @example + * + * _.union([2], [1, 2]); + * // => [2, 1] + */ + var union = baseRest(function(arrays) { + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true)); + }); + + /** + * This method is like `_.union` except that it accepts `iteratee` which is + * invoked for each element of each `arrays` to generate the criterion by + * which uniqueness is computed. Result values are chosen from the first + * array in which the value occurs. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new array of combined values. + * @example + * + * _.unionBy([2.1], [1.2, 2.3], Math.floor); + * // => [2.1, 1.2] + * + * // The `_.property` iteratee shorthand. + * _.unionBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x'); + * // => [{ 'x': 1 }, { 'x': 2 }] + */ + var unionBy = baseRest(function(arrays) { + var iteratee = last(arrays); + if (isArrayLikeObject(iteratee)) { + iteratee = undefined; + } + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true), getIteratee(iteratee, 2)); + }); + + /** + * This method is like `_.union` except that it accepts `comparator` which + * is invoked to compare elements of `arrays`. Result values are chosen from + * the first array in which the value occurs. The comparator is invoked + * with two arguments: (arrVal, othVal). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {...Array} [arrays] The arrays to inspect. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new array of combined values. + * @example + * + * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]; + * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }]; + * + * _.unionWith(objects, others, _.isEqual); + * // => [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }, { 'x': 1, 'y': 1 }] + */ + var unionWith = baseRest(function(arrays) { + var comparator = last(arrays); + comparator = typeof comparator == 'function' ? comparator : undefined; + return baseUniq(baseFlatten(arrays, 1, isArrayLikeObject, true), undefined, comparator); + }); + + /** + * Creates a duplicate-free version of an array, using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons, in which only the first occurrence of each element + * is kept. The order of result values is determined by the order they occur + * in the array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.uniq([2, 1, 2]); + * // => [2, 1] + */ + function uniq(array) { + return (array && array.length) ? baseUniq(array) : []; + } + + /** + * This method is like `_.uniq` except that it accepts `iteratee` which is + * invoked for each element in `array` to generate the criterion by which + * uniqueness is computed. The order of result values is determined by the + * order they occur in the array. The iteratee is invoked with one argument: + * (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Array + * @param {Array} array The array to inspect. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Array} Returns the new duplicate free array. 
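+ *
+ * (Editor's note, illustrative: when several elements map to the same
+ * criterion, the first one encountered is the one kept. A sketch:)
+ *
+ * _.uniqBy([{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }], 'x');
+ * // => [{ 'x': 1, 'y': 1 }]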
+ * @example
+ *
+ * _.uniqBy([2.1, 1.2, 2.3], Math.floor);
+ * // => [2.1, 1.2]
+ *
+ * // The `_.property` iteratee shorthand.
+ * _.uniqBy([{ 'x': 1 }, { 'x': 2 }, { 'x': 1 }], 'x');
+ * // => [{ 'x': 1 }, { 'x': 2 }]
+ */
+ function uniqBy(array, iteratee) {
+ return (array && array.length) ? baseUniq(array, getIteratee(iteratee, 2)) : [];
+ }
+
+ /**
+ * This method is like `_.uniq` except that it accepts `comparator` which
+ * is invoked to compare elements of `array`. The order of result values is
+ * determined by the order they occur in the array. The comparator is invoked
+ * with two arguments: (arrVal, othVal).
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Array
+ * @param {Array} array The array to inspect.
+ * @param {Function} [comparator] The comparator invoked per element.
+ * @returns {Array} Returns the new duplicate free array.
+ * @example
+ *
+ * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }, { 'x': 1, 'y': 2 }];
+ *
+ * _.uniqWith(objects, _.isEqual);
+ * // => [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }]
+ */
+ function uniqWith(array, comparator) {
+ comparator = typeof comparator == 'function' ? comparator : undefined;
+ return (array && array.length) ? baseUniq(array, undefined, comparator) : [];
+ }
+
+ /**
+ * This method is like `_.zip` except that it accepts an array of grouped
+ * elements and creates an array regrouping the elements to their pre-zip
+ * configuration.
+ *
+ * @static
+ * @memberOf _
+ * @since 1.2.0
+ * @category Array
+ * @param {Array} array The array of grouped elements to process.
+ * @returns {Array} Returns the new array of regrouped elements.
+ * @example
+ *
+ * var zipped = _.zip(['a', 'b'], [1, 2], [true, false]);
+ * // => [['a', 1, true], ['b', 2, false]]
+ *
+ * _.unzip(zipped);
+ * // => [['a', 'b'], [1, 2], [true, false]]
+ */
+ function unzip(array) {
+ if (!(array && array.length)) {
+ return [];
+ }
+ var length = 0;
+ array = arrayFilter(array, function(group) {
+ if (isArrayLikeObject(group)) {
+ length = nativeMax(group.length, length);
+ return true;
+ }
+ });
+ return baseTimes(length, function(index) {
+ return arrayMap(array, baseProperty(index));
+ });
+ }
+
+ /**
+ * This method is like `_.unzip` except that it accepts `iteratee` to specify
+ * how regrouped values should be combined. The iteratee is invoked with the
+ * elements of each group: (...group).
+ *
+ * @static
+ * @memberOf _
+ * @since 3.8.0
+ * @category Array
+ * @param {Array} array The array of grouped elements to process.
+ * @param {Function} [iteratee=_.identity] The function to combine
+ * regrouped values.
+ * @returns {Array} Returns the new array of regrouped elements.
+ * @example
+ *
+ * var zipped = _.zip([1, 2], [10, 20], [100, 200]);
+ * // => [[1, 10, 100], [2, 20, 200]]
+ *
+ * _.unzipWith(zipped, _.add);
+ * // => [3, 30, 300]
+ */
+ function unzipWith(array, iteratee) {
+ if (!(array && array.length)) {
+ return [];
+ }
+ var result = unzip(array);
+ if (iteratee == null) {
+ return result;
+ }
+ return arrayMap(result, function(group) {
+ return apply(iteratee, undefined, group);
+ });
+ }
+
+ /**
+ * Creates an array excluding all given values using
+ * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
+ * for equality comparisons.
+ *
+ * **Note:** Unlike `_.pull`, this method returns a new array.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Array
+ * @param {Array} array The array to inspect.
+ * @param {...*} [values] The values to exclude.
+ * @returns {Array} Returns the new array of filtered values.
+ * @see _.difference, _.xor
+ * @example
+ *
+ * _.without([2, 1, 2, 3], 1, 2);
+ * // => [3]
+ */
+ var without = baseRest(function(array, values) {
+ return isArrayLikeObject(array)
+ ? baseDifference(array, values)
+ : [];
+ });
+
+ /**
+ * Creates an array of unique values that is the
+ * [symmetric difference](https://en.wikipedia.org/wiki/Symmetric_difference)
+ * of the given arrays. The order of result values is determined by the order
+ * they occur in the arrays.
+ *
+ * @static
+ * @memberOf _
+ * @since 2.4.0
+ * @category Array
+ * @param {...Array} [arrays] The arrays to inspect.
+ * @returns {Array} Returns the new array of filtered values.
+ * @see _.difference, _.without
+ * @example
+ *
+ * _.xor([2, 1], [2, 3]);
+ * // => [1, 3]
+ */
+ var xor = baseRest(function(arrays) {
+ return baseXor(arrayFilter(arrays, isArrayLikeObject));
+ });
+
+ /**
+ * This method is like `_.xor` except that it accepts `iteratee` which is
+ * invoked for each element of each `arrays` to generate the criterion by
+ * which they're compared. The order of result values is determined
+ * by the order they occur in the arrays. The iteratee is invoked with one
+ * argument: (value).
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Array
+ * @param {...Array} [arrays] The arrays to inspect.
+ * @param {Function} [iteratee=_.identity] The iteratee invoked per element.
+ * @returns {Array} Returns the new array of filtered values.
+ * @example
+ *
+ * _.xorBy([2.1, 1.2], [2.3, 3.4], Math.floor);
+ * // => [1.2, 3.4]
+ *
+ * // The `_.property` iteratee shorthand.
+ * _.xorBy([{ 'x': 1 }], [{ 'x': 2 }, { 'x': 1 }], 'x');
+ * // => [{ 'x': 2 }]
+ */
+ var xorBy = baseRest(function(arrays) {
+ var iteratee = last(arrays);
+ if (isArrayLikeObject(iteratee)) {
+ iteratee = undefined;
+ }
+ return baseXor(arrayFilter(arrays, isArrayLikeObject), getIteratee(iteratee, 2));
+ });
+
+ /**
+ * This method is like `_.xor` except that it accepts `comparator` which is
+ * invoked to compare elements of `arrays`. The order of result values is
+ * determined by the order they occur in the arrays. The comparator is invoked
+ * with two arguments: (arrVal, othVal).
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Array
+ * @param {...Array} [arrays] The arrays to inspect.
+ * @param {Function} [comparator] The comparator invoked per element.
+ * @returns {Array} Returns the new array of filtered values.
+ * @example
+ *
+ * var objects = [{ 'x': 1, 'y': 2 }, { 'x': 2, 'y': 1 }];
+ * var others = [{ 'x': 1, 'y': 1 }, { 'x': 1, 'y': 2 }];
+ *
+ * _.xorWith(objects, others, _.isEqual);
+ * // => [{ 'x': 2, 'y': 1 }, { 'x': 1, 'y': 1 }]
+ */
+ var xorWith = baseRest(function(arrays) {
+ var comparator = last(arrays);
+ comparator = typeof comparator == 'function' ? comparator : undefined;
+ return baseXor(arrayFilter(arrays, isArrayLikeObject), undefined, comparator);
+ });
+
+ /**
+ * Creates an array of grouped elements, the first of which contains the
+ * first elements of the given arrays, the second of which contains the
+ * second elements of the given arrays, and so on.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Array
+ * @param {...Array} [arrays] The arrays to process.
+ * @returns {Array} Returns the new array of grouped elements.
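+ *
+ * (Editor's note, illustrative: `zip` is implemented as a rest-argument
+ * wrapper around `unzip`, so applying `_.unzip` to a zipped result restores
+ * the original grouping. A sketch:)
+ *
+ * _.unzip(_.zip(['a', 'b'], [1, 2]));
+ * // => [['a', 'b'], [1, 2]]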
+ * @example + * + * _.zip(['a', 'b'], [1, 2], [true, false]); + * // => [['a', 1, true], ['b', 2, false]] + */ + var zip = baseRest(unzip); + + /** + * This method is like `_.fromPairs` except that it accepts two arrays, + * one of property identifiers and one of corresponding values. + * + * @static + * @memberOf _ + * @since 0.4.0 + * @category Array + * @param {Array} [props=[]] The property identifiers. + * @param {Array} [values=[]] The property values. + * @returns {Object} Returns the new object. + * @example + * + * _.zipObject(['a', 'b'], [1, 2]); + * // => { 'a': 1, 'b': 2 } + */ + function zipObject(props, values) { + return baseZipObject(props || [], values || [], assignValue); + } + + /** + * This method is like `_.zipObject` except that it supports property paths. + * + * @static + * @memberOf _ + * @since 4.1.0 + * @category Array + * @param {Array} [props=[]] The property identifiers. + * @param {Array} [values=[]] The property values. + * @returns {Object} Returns the new object. + * @example + * + * _.zipObjectDeep(['a.b[0].c', 'a.b[1].d'], [1, 2]); + * // => { 'a': { 'b': [{ 'c': 1 }, { 'd': 2 }] } } + */ + function zipObjectDeep(props, values) { + return baseZipObject(props || [], values || [], baseSet); + } + + /** + * This method is like `_.zip` except that it accepts `iteratee` to specify + * how grouped values should be combined. The iteratee is invoked with the + * elements of each group: (...group). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Array + * @param {...Array} [arrays] The arrays to process. + * @param {Function} [iteratee=_.identity] The function to combine + * grouped values. + * @returns {Array} Returns the new array of grouped elements. + * @example + * + * _.zipWith([1, 2], [10, 20], [100, 200], function(a, b, c) { + * return a + b + c; + * }); + * // => [111, 222] + */ + var zipWith = baseRest(function(arrays) { + var length = arrays.length, + iteratee = length > 1 ? arrays[length - 1] : undefined; + + iteratee = typeof iteratee == 'function' ? (arrays.pop(), iteratee) : undefined; + return unzipWith(arrays, iteratee); + }); + + /*------------------------------------------------------------------------*/ + + /** + * Creates a `lodash` wrapper instance that wraps `value` with explicit method + * chain sequences enabled. The result of such sequences must be unwrapped + * with `_#value`. + * + * @static + * @memberOf _ + * @since 1.3.0 + * @category Seq + * @param {*} value The value to wrap. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'pebbles', 'age': 1 } + * ]; + * + * var youngest = _ + * .chain(users) + * .sortBy('age') + * .map(function(o) { + * return o.user + ' is ' + o.age; + * }) + * .head() + * .value(); + * // => 'pebbles is 1' + */ + function chain(value) { + var result = lodash(value); + result.__chain__ = true; + return result; + } + + /** + * This method invokes `interceptor` and returns `value`. The interceptor + * is invoked with one argument; (value). The purpose of this method is to + * "tap into" a method chain sequence in order to modify intermediate results. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns `value`. + * @example + * + * _([1, 2, 3]) + * .tap(function(array) { + * // Mutate input array. 
+ * array.pop(); + * }) + * .reverse() + * .value(); + * // => [2, 1] + */ + function tap(value, interceptor) { + interceptor(value); + return value; + } + + /** + * This method is like `_.tap` except that it returns the result of `interceptor`. + * The purpose of this method is to "pass thru" values replacing intermediate + * results in a method chain sequence. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Seq + * @param {*} value The value to provide to `interceptor`. + * @param {Function} interceptor The function to invoke. + * @returns {*} Returns the result of `interceptor`. + * @example + * + * _(' abc ') + * .chain() + * .trim() + * .thru(function(value) { + * return [value]; + * }) + * .value(); + * // => ['abc'] + */ + function thru(value, interceptor) { + return interceptor(value); + } + + /** + * This method is the wrapper version of `_.at`. + * + * @name at + * @memberOf _ + * @since 1.0.0 + * @category Seq + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }, 4] }; + * + * _(object).at(['a[0].b.c', 'a[1]']).value(); + * // => [3, 4] + */ + var wrapperAt = flatRest(function(paths) { + var length = paths.length, + start = length ? paths[0] : 0, + value = this.__wrapped__, + interceptor = function(object) { return baseAt(object, paths); }; + + if (length > 1 || this.__actions__.length || + !(value instanceof LazyWrapper) || !isIndex(start)) { + return this.thru(interceptor); + } + value = value.slice(start, +start + (length ? 1 : 0)); + value.__actions__.push({ + 'func': thru, + 'args': [interceptor], + 'thisArg': undefined + }); + return new LodashWrapper(value, this.__chain__).thru(function(array) { + if (length && !array.length) { + array.push(undefined); + } + return array; + }); + }); + + /** + * Creates a `lodash` wrapper instance with explicit method chain sequences enabled. + * + * @name chain + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 40 } + * ]; + * + * // A sequence without explicit chaining. + * _(users).head(); + * // => { 'user': 'barney', 'age': 36 } + * + * // A sequence with explicit chaining. + * _(users) + * .chain() + * .head() + * .pick('user') + * .value(); + * // => { 'user': 'barney' } + */ + function wrapperChain() { + return chain(this); + } + + /** + * Executes the chain sequence and returns the wrapped result. + * + * @name commit + * @memberOf _ + * @since 3.2.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var array = [1, 2]; + * var wrapped = _(array).push(3); + * + * console.log(array); + * // => [1, 2] + * + * wrapped = wrapped.commit(); + * console.log(array); + * // => [1, 2, 3] + * + * wrapped.last(); + * // => 3 + * + * console.log(array); + * // => [1, 2, 3] + */ + function wrapperCommit() { + return new LodashWrapper(this.value(), this.__chain__); + } + + /** + * Gets the next value on a wrapped object following the + * [iterator protocol](https://mdn.io/iteration_protocols#iterator). + * + * @name next + * @memberOf _ + * @since 4.0.0 + * @category Seq + * @returns {Object} Returns the next iterator value. 
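+ *
+ * (Editor's note, illustrative: the first `next()` call resolves the whole
+ * chain once via `toArray` and caches the result in `__values__`; later
+ * calls simply walk the cached array. A sketch:)
+ *
+ * var w = _([1, 2]).map(function(n) { return n * 2; });
+ * w.next();
+ * // => { 'done': false, 'value': 2 }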
+ * @example + * + * var wrapped = _([1, 2]); + * + * wrapped.next(); + * // => { 'done': false, 'value': 1 } + * + * wrapped.next(); + * // => { 'done': false, 'value': 2 } + * + * wrapped.next(); + * // => { 'done': true, 'value': undefined } + */ + function wrapperNext() { + if (this.__values__ === undefined) { + this.__values__ = toArray(this.value()); + } + var done = this.__index__ >= this.__values__.length, + value = done ? undefined : this.__values__[this.__index__++]; + + return { 'done': done, 'value': value }; + } + + /** + * Enables the wrapper to be iterable. + * + * @name Symbol.iterator + * @memberOf _ + * @since 4.0.0 + * @category Seq + * @returns {Object} Returns the wrapper object. + * @example + * + * var wrapped = _([1, 2]); + * + * wrapped[Symbol.iterator]() === wrapped; + * // => true + * + * Array.from(wrapped); + * // => [1, 2] + */ + function wrapperToIterator() { + return this; + } + + /** + * Creates a clone of the chain sequence planting `value` as the wrapped value. + * + * @name plant + * @memberOf _ + * @since 3.2.0 + * @category Seq + * @param {*} value The value to plant. + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * function square(n) { + * return n * n; + * } + * + * var wrapped = _([1, 2]).map(square); + * var other = wrapped.plant([3, 4]); + * + * other.value(); + * // => [9, 16] + * + * wrapped.value(); + * // => [1, 4] + */ + function wrapperPlant(value) { + var result, + parent = this; + + while (parent instanceof baseLodash) { + var clone = wrapperClone(parent); + clone.__index__ = 0; + clone.__values__ = undefined; + if (result) { + previous.__wrapped__ = clone; + } else { + result = clone; + } + var previous = clone; + parent = parent.__wrapped__; + } + previous.__wrapped__ = value; + return result; + } + + /** + * This method is the wrapper version of `_.reverse`. + * + * **Note:** This method mutates the wrapped array. + * + * @name reverse + * @memberOf _ + * @since 0.1.0 + * @category Seq + * @returns {Object} Returns the new `lodash` wrapper instance. + * @example + * + * var array = [1, 2, 3]; + * + * _(array).reverse().value() + * // => [3, 2, 1] + * + * console.log(array); + * // => [3, 2, 1] + */ + function wrapperReverse() { + var value = this.__wrapped__; + if (value instanceof LazyWrapper) { + var wrapped = value; + if (this.__actions__.length) { + wrapped = new LazyWrapper(this); + } + wrapped = wrapped.reverse(); + wrapped.__actions__.push({ + 'func': thru, + 'args': [reverse], + 'thisArg': undefined + }); + return new LodashWrapper(wrapped, this.__chain__); + } + return this.thru(reverse); + } + + /** + * Executes the chain sequence to resolve the unwrapped value. + * + * @name value + * @memberOf _ + * @since 0.1.0 + * @alias toJSON, valueOf + * @category Seq + * @returns {*} Returns the resolved unwrapped value. + * @example + * + * _([1, 2, 3]).value(); + * // => [1, 2, 3] + */ + function wrapperValue() { + return baseWrapperValue(this.__wrapped__, this.__actions__); + } + + /*------------------------------------------------------------------------*/ + + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The corresponding value of + * each key is the number of times the key was returned by `iteratee`. The + * iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.countBy([6.1, 4.2, 6.3], Math.floor); + * // => { '4': 1, '6': 2 } + * + * // The `_.property` iteratee shorthand. + * _.countBy(['one', 'two', 'three'], 'length'); + * // => { '3': 2, '5': 1 } + */ + var countBy = createAggregator(function(result, value, key) { + if (hasOwnProperty.call(result, key)) { + ++result[key]; + } else { + baseAssignValue(result, key, 1); + } + }); + + /** + * Checks if `predicate` returns truthy for **all** elements of `collection`. + * Iteration is stopped once `predicate` returns falsey. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * **Note:** This method returns `true` for + * [empty collections](https://en.wikipedia.org/wiki/Empty_set) because + * [everything is true](https://en.wikipedia.org/wiki/Vacuous_truth) of + * elements of empty collections. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if all elements pass the predicate check, + * else `false`. + * @example + * + * _.every([true, 1, null, 'yes'], Boolean); + * // => false + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.every(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.every(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.every(users, 'active'); + * // => false + */ + function every(collection, predicate, guard) { + var func = isArray(collection) ? arrayEvery : baseEvery; + if (guard && isIterateeCall(collection, predicate, guard)) { + predicate = undefined; + } + return func(collection, getIteratee(predicate, 3)); + } + + /** + * Iterates over elements of `collection`, returning an array of all elements + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * **Note:** Unlike `_.remove`, this method returns a new array. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + * @see _.reject + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false } + * ]; + * + * _.filter(users, function(o) { return !o.active; }); + * // => objects for ['fred'] + * + * // The `_.matches` iteratee shorthand. + * _.filter(users, { 'age': 36, 'active': true }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.filter(users, ['active', false]); + * // => objects for ['fred'] + * + * // The `_.property` iteratee shorthand. + * _.filter(users, 'active'); + * // => objects for ['barney'] + * + * // Combining several predicates using `_.overEvery` or `_.overSome`. 
+ * _.filter(users, _.overSome([{ 'age': 36 }, ['age', 40]])); + * // => objects for ['fred', 'barney'] + */ + function filter(collection, predicate) { + var func = isArray(collection) ? arrayFilter : baseFilter; + return func(collection, getIteratee(predicate, 3)); + } + + /** + * Iterates over elements of `collection`, returning the first element + * `predicate` returns truthy for. The predicate is invoked with three + * arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=0] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': true }, + * { 'user': 'fred', 'age': 40, 'active': false }, + * { 'user': 'pebbles', 'age': 1, 'active': true } + * ]; + * + * _.find(users, function(o) { return o.age < 40; }); + * // => object for 'barney' + * + * // The `_.matches` iteratee shorthand. + * _.find(users, { 'age': 1, 'active': true }); + * // => object for 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.find(users, ['active', false]); + * // => object for 'fred' + * + * // The `_.property` iteratee shorthand. + * _.find(users, 'active'); + * // => object for 'barney' + */ + var find = createFind(findIndex); + + /** + * This method is like `_.find` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Collection + * @param {Array|Object} collection The collection to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param {number} [fromIndex=collection.length-1] The index to search from. + * @returns {*} Returns the matched element, else `undefined`. + * @example + * + * _.findLast([1, 2, 3, 4], function(n) { + * return n % 2 == 1; + * }); + * // => 3 + */ + var findLast = createFind(findLastIndex); + + /** + * Creates a flattened array of values by running each element in `collection` + * thru `iteratee` and flattening the mapped results. The iteratee is invoked + * with three arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [n, n]; + * } + * + * _.flatMap([1, 2], duplicate); + * // => [1, 1, 2, 2] + */ + function flatMap(collection, iteratee) { + return baseFlatten(map(collection, iteratee), 1); + } + + /** + * This method is like `_.flatMap` except that it recursively flattens the + * mapped results. + * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new flattened array. 
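+ *
+ * (Editor's note, illustrative: `flatMapDeep` flattens the mapped results
+ * with depth `INFINITY`, whereas `_.flatMap` stops at depth `1`. A sketch:)
+ *
+ * _.flatMap([1], function(n) { return [[n]]; });
+ * // => [[1]]
+ *
+ * _.flatMapDeep([1], function(n) { return [[n]]; });
+ * // => [1]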
+ * @example + * + * function duplicate(n) { + * return [[[n, n]]]; + * } + * + * _.flatMapDeep([1, 2], duplicate); + * // => [1, 1, 2, 2] + */ + function flatMapDeep(collection, iteratee) { + return baseFlatten(map(collection, iteratee), INFINITY); + } + + /** + * This method is like `_.flatMap` except that it recursively flattens the + * mapped results up to `depth` times. + * + * @static + * @memberOf _ + * @since 4.7.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {number} [depth=1] The maximum recursion depth. + * @returns {Array} Returns the new flattened array. + * @example + * + * function duplicate(n) { + * return [[[n, n]]]; + * } + * + * _.flatMapDepth([1, 2], duplicate, 2); + * // => [[1, 1], [2, 2]] + */ + function flatMapDepth(collection, iteratee, depth) { + depth = depth === undefined ? 1 : toInteger(depth); + return baseFlatten(map(collection, iteratee), depth); + } + + /** + * Iterates over elements of `collection` and invokes `iteratee` for each element. + * The iteratee is invoked with three arguments: (value, index|key, collection). + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * **Note:** As with other "Collections" methods, objects with a "length" + * property are iterated like arrays. To avoid this behavior use `_.forIn` + * or `_.forOwn` for object iteration. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @alias each + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEachRight + * @example + * + * _.forEach([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `1` then `2`. + * + * _.forEach({ 'a': 1, 'b': 2 }, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ + function forEach(collection, iteratee) { + var func = isArray(collection) ? arrayEach : baseEach; + return func(collection, getIteratee(iteratee, 3)); + } + + /** + * This method is like `_.forEach` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @alias eachRight + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array|Object} Returns `collection`. + * @see _.forEach + * @example + * + * _.forEachRight([1, 2], function(value) { + * console.log(value); + * }); + * // => Logs `2` then `1`. + */ + function forEachRight(collection, iteratee) { + var func = isArray(collection) ? arrayEachRight : baseEachRight; + return func(collection, getIteratee(iteratee, 3)); + } + + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The order of grouped values + * is determined by the order they occur in `collection`. The corresponding + * value of each key is an array of elements responsible for generating the + * key. The iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. 
+ * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * _.groupBy([6.1, 4.2, 6.3], Math.floor); + * // => { '4': [4.2], '6': [6.1, 6.3] } + * + * // The `_.property` iteratee shorthand. + * _.groupBy(['one', 'two', 'three'], 'length'); + * // => { '3': ['one', 'two'], '5': ['three'] } + */ + var groupBy = createAggregator(function(result, value, key) { + if (hasOwnProperty.call(result, key)) { + result[key].push(value); + } else { + baseAssignValue(result, key, [value]); + } + }); + + /** + * Checks if `value` is in `collection`. If `collection` is a string, it's + * checked for a substring of `value`, otherwise + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * is used for equality comparisons. If `fromIndex` is negative, it's used as + * the offset from the end of `collection`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @param {*} value The value to search for. + * @param {number} [fromIndex=0] The index to search from. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`. + * @returns {boolean} Returns `true` if `value` is found, else `false`. + * @example + * + * _.includes([1, 2, 3], 1); + * // => true + * + * _.includes([1, 2, 3], 1, 2); + * // => false + * + * _.includes({ 'a': 1, 'b': 2 }, 1); + * // => true + * + * _.includes('abcd', 'bc'); + * // => true + */ + function includes(collection, value, fromIndex, guard) { + collection = isArrayLike(collection) ? collection : values(collection); + fromIndex = (fromIndex && !guard) ? toInteger(fromIndex) : 0; + + var length = collection.length; + if (fromIndex < 0) { + fromIndex = nativeMax(length + fromIndex, 0); + } + return isString(collection) + ? (fromIndex <= length && collection.indexOf(value, fromIndex) > -1) + : (!!length && baseIndexOf(collection, value, fromIndex) > -1); + } + + /** + * Invokes the method at `path` of each element in `collection`, returning + * an array of the results of each invoked method. Any additional arguments + * are provided to each invoked method. If `path` is a function, it's invoked + * for, and `this` bound to, each element in `collection`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Array|Function|string} path The path of the method to invoke or + * the function invoked per iteration. + * @param {...*} [args] The arguments to invoke each method with. + * @returns {Array} Returns the array of results. + * @example + * + * _.invokeMap([[5, 1, 7], [3, 2, 1]], 'sort'); + * // => [[1, 5, 7], [1, 2, 3]] + * + * _.invokeMap([123, 456], String.prototype.split, ''); + * // => [['1', '2', '3'], ['4', '5', '6']] + */ + var invokeMap = baseRest(function(collection, path, args) { + var index = -1, + isFunc = typeof path == 'function', + result = isArrayLike(collection) ? Array(collection.length) : []; + + baseEach(collection, function(value) { + result[++index] = isFunc ? apply(path, value, args) : baseInvoke(value, path, args); + }); + return result; + }); + + /** + * Creates an object composed of keys generated from the results of running + * each element of `collection` thru `iteratee`. The corresponding value of + * each key is the last element responsible for generating the key. 
The + * iteratee is invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The iteratee to transform keys. + * @returns {Object} Returns the composed aggregate object. + * @example + * + * var array = [ + * { 'dir': 'left', 'code': 97 }, + * { 'dir': 'right', 'code': 100 } + * ]; + * + * _.keyBy(array, function(o) { + * return String.fromCharCode(o.code); + * }); + * // => { 'a': { 'dir': 'left', 'code': 97 }, 'd': { 'dir': 'right', 'code': 100 } } + * + * _.keyBy(array, 'dir'); + * // => { 'left': { 'dir': 'left', 'code': 97 }, 'right': { 'dir': 'right', 'code': 100 } } + */ + var keyBy = createAggregator(function(result, value, key) { + baseAssignValue(result, key, value); + }); + + /** + * Creates an array of values by running each element in `collection` thru + * `iteratee`. The iteratee is invoked with three arguments: + * (value, index|key, collection). + * + * Many lodash methods are guarded to work as iteratees for methods like + * `_.every`, `_.filter`, `_.map`, `_.mapValues`, `_.reject`, and `_.some`. + * + * The guarded methods are: + * `ary`, `chunk`, `curry`, `curryRight`, `drop`, `dropRight`, `every`, + * `fill`, `invert`, `parseInt`, `random`, `range`, `rangeRight`, `repeat`, + * `sampleSize`, `slice`, `some`, `sortBy`, `split`, `take`, `takeRight`, + * `template`, `trim`, `trimEnd`, `trimStart`, and `words` + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new mapped array. + * @example + * + * function square(n) { + * return n * n; + * } + * + * _.map([4, 8], square); + * // => [16, 64] + * + * _.map({ 'a': 4, 'b': 8 }, square); + * // => [16, 64] (iteration order is not guaranteed) + * + * var users = [ + * { 'user': 'barney' }, + * { 'user': 'fred' } + * ]; + * + * // The `_.property` iteratee shorthand. + * _.map(users, 'user'); + * // => ['barney', 'fred'] + */ + function map(collection, iteratee) { + var func = isArray(collection) ? arrayMap : baseMap; + return func(collection, getIteratee(iteratee, 3)); + } + + /** + * This method is like `_.sortBy` except that it allows specifying the sort + * orders of the iteratees to sort by. If `orders` is unspecified, all values + * are sorted in ascending order. Otherwise, specify an order of "desc" for + * descending or "asc" for ascending sort order of corresponding values. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Array[]|Function[]|Object[]|string[]} [iteratees=[_.identity]] + * The iteratees to sort by. + * @param {string[]} [orders] The sort orders of `iteratees`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.reduce`. + * @returns {Array} Returns the new sorted array. + * @example + * + * var users = [ + * { 'user': 'fred', 'age': 48 }, + * { 'user': 'barney', 'age': 34 }, + * { 'user': 'fred', 'age': 40 }, + * { 'user': 'barney', 'age': 36 } + * ]; + * + * // Sort by `user` in ascending order and by `age` in descending order. 
+ * _.orderBy(users, ['user', 'age'], ['asc', 'desc']); + * // => objects for [['barney', 36], ['barney', 34], ['fred', 48], ['fred', 40]] + */ + function orderBy(collection, iteratees, orders, guard) { + if (collection == null) { + return []; + } + if (!isArray(iteratees)) { + iteratees = iteratees == null ? [] : [iteratees]; + } + orders = guard ? undefined : orders; + if (!isArray(orders)) { + orders = orders == null ? [] : [orders]; + } + return baseOrderBy(collection, iteratees, orders); + } + + /** + * Creates an array of elements split into two groups, the first of which + * contains elements `predicate` returns truthy for, the second of which + * contains elements `predicate` returns falsey for. The predicate is + * invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the array of grouped elements. + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': true }, + * { 'user': 'pebbles', 'age': 1, 'active': false } + * ]; + * + * _.partition(users, function(o) { return o.active; }); + * // => objects for [['fred'], ['barney', 'pebbles']] + * + * // The `_.matches` iteratee shorthand. + * _.partition(users, { 'age': 1, 'active': false }); + * // => objects for [['pebbles'], ['barney', 'fred']] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.partition(users, ['active', false]); + * // => objects for [['barney', 'pebbles'], ['fred']] + * + * // The `_.property` iteratee shorthand. + * _.partition(users, 'active'); + * // => objects for [['fred'], ['barney', 'pebbles']] + */ + var partition = createAggregator(function(result, value, key) { + result[key ? 0 : 1].push(value); + }, function() { return [[], []]; }); + + /** + * Reduces `collection` to a value which is the accumulated result of running + * each element in `collection` thru `iteratee`, where each successive + * invocation is supplied the return value of the previous. If `accumulator` + * is not given, the first element of `collection` is used as the initial + * value. The iteratee is invoked with four arguments: + * (accumulator, value, index|key, collection). + * + * Many lodash methods are guarded to work as iteratees for methods like + * `_.reduce`, `_.reduceRight`, and `_.transform`. + * + * The guarded methods are: + * `assign`, `defaults`, `defaultsDeep`, `includes`, `merge`, `orderBy`, + * and `sortBy` + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @returns {*} Returns the accumulated value. + * @see _.reduceRight + * @example + * + * _.reduce([1, 2], function(sum, n) { + * return sum + n; + * }, 0); + * // => 3 + * + * _.reduce({ 'a': 1, 'b': 2, 'c': 1 }, function(result, value, key) { + * (result[value] || (result[value] = [])).push(key); + * return result; + * }, {}); + * // => { '1': ['a', 'c'], '2': ['b'] } (iteration order is not guaranteed) + */ + function reduce(collection, iteratee, accumulator) { + var func = isArray(collection) ? 
arrayReduce : baseReduce, + initAccum = arguments.length < 3; + + return func(collection, getIteratee(iteratee, 4), accumulator, initAccum, baseEach); + } + + /** + * This method is like `_.reduce` except that it iterates over elements of + * `collection` from right to left. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The initial value. + * @returns {*} Returns the accumulated value. + * @see _.reduce + * @example + * + * var array = [[0, 1], [2, 3], [4, 5]]; + * + * _.reduceRight(array, function(flattened, other) { + * return flattened.concat(other); + * }, []); + * // => [4, 5, 2, 3, 0, 1] + */ + function reduceRight(collection, iteratee, accumulator) { + var func = isArray(collection) ? arrayReduceRight : baseReduce, + initAccum = arguments.length < 3; + + return func(collection, getIteratee(iteratee, 4), accumulator, initAccum, baseEachRight); + } + + /** + * The opposite of `_.filter`; this method returns the elements of `collection` + * that `predicate` does **not** return truthy for. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {Array} Returns the new filtered array. + * @see _.filter + * @example + * + * var users = [ + * { 'user': 'barney', 'age': 36, 'active': false }, + * { 'user': 'fred', 'age': 40, 'active': true } + * ]; + * + * _.reject(users, function(o) { return !o.active; }); + * // => objects for ['fred'] + * + * // The `_.matches` iteratee shorthand. + * _.reject(users, { 'age': 40, 'active': true }); + * // => objects for ['barney'] + * + * // The `_.matchesProperty` iteratee shorthand. + * _.reject(users, ['active', false]); + * // => objects for ['fred'] + * + * // The `_.property` iteratee shorthand. + * _.reject(users, 'active'); + * // => objects for ['barney'] + */ + function reject(collection, predicate) { + var func = isArray(collection) ? arrayFilter : baseFilter; + return func(collection, negate(getIteratee(predicate, 3))); + } + + /** + * Gets a random element from `collection`. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Collection + * @param {Array|Object} collection The collection to sample. + * @returns {*} Returns the random element. + * @example + * + * _.sample([1, 2, 3, 4]); + * // => 2 + */ + function sample(collection) { + var func = isArray(collection) ? arraySample : baseSample; + return func(collection); + } + + /** + * Gets `n` random elements at unique keys from `collection` up to the + * size of `collection`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Collection + * @param {Array|Object} collection The collection to sample. + * @param {number} [n=1] The number of elements to sample. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Array} Returns the random elements. + * @example + * + * _.sampleSize([1, 2, 3], 2); + * // => [3, 1] + * + * _.sampleSize([1, 2, 3], 4); + * // => [2, 3, 1] + */ + function sampleSize(collection, n, guard) { + if ((guard ? isIterateeCall(collection, n, guard) : n === undefined)) { + n = 1; + } else { + n = toInteger(n); + } + var func = isArray(collection) ? 
arraySampleSize : baseSampleSize; + return func(collection, n); + } + + /** + * Creates an array of shuffled values, using a version of the + * [Fisher-Yates shuffle](https://en.wikipedia.org/wiki/Fisher-Yates_shuffle). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to shuffle. + * @returns {Array} Returns the new shuffled array. + * @example + * + * _.shuffle([1, 2, 3, 4]); + * // => [4, 1, 3, 2] + */ + function shuffle(collection) { + var func = isArray(collection) ? arrayShuffle : baseShuffle; + return func(collection); + } + + /** + * Gets the size of `collection` by returning its length for array-like + * values or the number of own enumerable string keyed properties for objects. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object|string} collection The collection to inspect. + * @returns {number} Returns the collection size. + * @example + * + * _.size([1, 2, 3]); + * // => 3 + * + * _.size({ 'a': 1, 'b': 2 }); + * // => 2 + * + * _.size('pebbles'); + * // => 7 + */ + function size(collection) { + if (collection == null) { + return 0; + } + if (isArrayLike(collection)) { + return isString(collection) ? stringSize(collection) : collection.length; + } + var tag = getTag(collection); + if (tag == mapTag || tag == setTag) { + return collection.size; + } + return baseKeys(collection).length; + } + + /** + * Checks if `predicate` returns truthy for **any** element of `collection`. + * Iteration is stopped once `predicate` returns truthy. The predicate is + * invoked with three arguments: (value, index|key, collection). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {boolean} Returns `true` if any element passes the predicate check, + * else `false`. + * @example + * + * _.some([null, 0, 'yes', false], Boolean); + * // => true + * + * var users = [ + * { 'user': 'barney', 'active': true }, + * { 'user': 'fred', 'active': false } + * ]; + * + * // The `_.matches` iteratee shorthand. + * _.some(users, { 'user': 'barney', 'active': false }); + * // => false + * + * // The `_.matchesProperty` iteratee shorthand. + * _.some(users, ['active', false]); + * // => true + * + * // The `_.property` iteratee shorthand. + * _.some(users, 'active'); + * // => true + */ + function some(collection, predicate, guard) { + var func = isArray(collection) ? arraySome : baseSome; + if (guard && isIterateeCall(collection, predicate, guard)) { + predicate = undefined; + } + return func(collection, getIteratee(predicate, 3)); + } + + /** + * Creates an array of elements, sorted in ascending order by the results of + * running each element in a collection thru each iteratee. This method + * performs a stable sort, that is, it preserves the original sort order of + * equal elements. The iteratees are invoked with one argument: (value). + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Collection + * @param {Array|Object} collection The collection to iterate over. + * @param {...(Function|Function[])} [iteratees=[_.identity]] + * The iteratees to sort by. + * @returns {Array} Returns the new sorted array. 
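+ *
+ * (Editor's note, illustrative: the sort is stable, so elements whose sort
+ * keys compare equal keep their original relative order. A sketch:)
+ *
+ * _.sortBy([{ 'a': 2, 'i': 0 }, { 'a': 1, 'i': 1 }, { 'a': 2, 'i': 2 }], 'a');
+ * // => [{ 'a': 1, 'i': 1 }, { 'a': 2, 'i': 0 }, { 'a': 2, 'i': 2 }]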
+ * @example + * + * var users = [ + * { 'user': 'fred', 'age': 48 }, + * { 'user': 'barney', 'age': 36 }, + * { 'user': 'fred', 'age': 30 }, + * { 'user': 'barney', 'age': 34 } + * ]; + * + * _.sortBy(users, [function(o) { return o.user; }]); + * // => objects for [['barney', 36], ['barney', 34], ['fred', 48], ['fred', 30]] + * + * _.sortBy(users, ['user', 'age']); + * // => objects for [['barney', 34], ['barney', 36], ['fred', 30], ['fred', 48]] + */ + var sortBy = baseRest(function(collection, iteratees) { + if (collection == null) { + return []; + } + var length = iteratees.length; + if (length > 1 && isIterateeCall(collection, iteratees[0], iteratees[1])) { + iteratees = []; + } else if (length > 2 && isIterateeCall(iteratees[0], iteratees[1], iteratees[2])) { + iteratees = [iteratees[0]]; + } + return baseOrderBy(collection, baseFlatten(iteratees, 1), []); + }); + + /*------------------------------------------------------------------------*/ + + /** + * Gets the timestamp of the number of milliseconds that have elapsed since + * the Unix epoch (1 January 1970 00:00:00 UTC). + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Date + * @returns {number} Returns the timestamp. + * @example + * + * _.defer(function(stamp) { + * console.log(_.now() - stamp); + * }, _.now()); + * // => Logs the number of milliseconds it took for the deferred invocation. + */ + var now = ctxNow || function() { + return root.Date.now(); + }; + + /*------------------------------------------------------------------------*/ + + /** + * The opposite of `_.before`; this method creates a function that invokes + * `func` once it's called `n` or more times. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {number} n The number of calls before `func` is invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * var saves = ['profile', 'settings']; + * + * var done = _.after(saves.length, function() { + * console.log('done saving!'); + * }); + * + * _.forEach(saves, function(type) { + * asyncSave({ 'type': type, 'complete': done }); + * }); + * // => Logs 'done saving!' after the two async saves have completed. + */ + function after(n, func) { + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n < 1) { + return func.apply(this, arguments); + } + }; + } + + /** + * Creates a function that invokes `func`, with up to `n` arguments, + * ignoring any additional arguments. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to cap arguments for. + * @param {number} [n=func.length] The arity cap. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new capped function. + * @example + * + * _.map(['6', '8', '10'], _.ary(parseInt, 1)); + * // => [6, 8, 10] + */ + function ary(func, n, guard) { + n = guard ? undefined : n; + n = (func && n == null) ? func.length : n; + return createWrap(func, WRAP_ARY_FLAG, undefined, undefined, undefined, undefined, n); + } + + /** + * Creates a function that invokes `func`, with the `this` binding and arguments + * of the created function, while it's called less than `n` times. Subsequent + * calls to the created function return the result of the last `func` invocation. 
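+ *
+ * (Editor's note, illustrative: for `_.before(n, func)`, `func` runs on the
+ * first `n - 1` calls; from call `n` onward the cached result of the last
+ * invocation is returned and `func` is released for garbage collection.
+ * A sketch:)
+ *
+ * var calls = 0;
+ * var twice = _.before(3, function() { return ++calls; });
+ * twice(); twice(); twice();
+ * // => 2 (the third call returns the second call's result)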
+ * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {number} n The number of calls at which `func` is no longer invoked. + * @param {Function} func The function to restrict. + * @returns {Function} Returns the new restricted function. + * @example + * + * jQuery(element).on('click', _.before(5, addContactToList)); + * // => Allows adding up to 4 contacts to the list. + */ + function before(n, func) { + var result; + if (typeof func != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + n = toInteger(n); + return function() { + if (--n > 0) { + result = func.apply(this, arguments); + } + if (n <= 1) { + func = undefined; + } + return result; + }; + } + + /** + * Creates a function that invokes `func` with the `this` binding of `thisArg` + * and `partials` prepended to the arguments it receives. + * + * The `_.bind.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for partially applied arguments. + * + * **Note:** Unlike native `Function#bind`, this method doesn't set the "length" + * property of bound functions. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to bind. + * @param {*} thisArg The `this` binding of `func`. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * function greet(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * + * var object = { 'user': 'fred' }; + * + * var bound = _.bind(greet, object, 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * // Bound with placeholders. + * var bound = _.bind(greet, object, _, '!'); + * bound('hi'); + * // => 'hi fred!' + */ + var bind = baseRest(function(func, thisArg, partials) { + var bitmask = WRAP_BIND_FLAG; + if (partials.length) { + var holders = replaceHolders(partials, getHolder(bind)); + bitmask |= WRAP_PARTIAL_FLAG; + } + return createWrap(func, bitmask, thisArg, partials, holders); + }); + + /** + * Creates a function that invokes the method at `object[key]` with `partials` + * prepended to the arguments it receives. + * + * This method differs from `_.bind` by allowing bound functions to reference + * methods that may be redefined or don't yet exist. See + * [Peter Michaux's article](http://peter.michaux.ca/articles/lazy-function-definition-pattern) + * for more details. + * + * The `_.bindKey.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for partially applied arguments. + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Function + * @param {Object} object The object to invoke the method on. + * @param {string} key The key of the method. + * @param {...*} [partials] The arguments to be partially applied. + * @returns {Function} Returns the new bound function. + * @example + * + * var object = { + * 'user': 'fred', + * 'greet': function(greeting, punctuation) { + * return greeting + ' ' + this.user + punctuation; + * } + * }; + * + * var bound = _.bindKey(object, 'greet', 'hi'); + * bound('!'); + * // => 'hi fred!' + * + * object.greet = function(greeting, punctuation) { + * return greeting + 'ya ' + this.user + punctuation; + * }; + * + * bound('!'); + * // => 'hiya fred!' + * + * // Bound with placeholders. + * var bound = _.bindKey(object, 'greet', _, '!'); + * bound('hi'); + * // => 'hiya fred!' 
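+ *
+ * // (Editor's note, illustrative: `_.bind(object.greet, object, 'hi')` would
+ * // have captured the original `greet`, so the redefinition above would not
+ * // have affected it; `_.bindKey` looks up `object['greet']` at call time.)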
+ */ + var bindKey = baseRest(function(object, key, partials) { + var bitmask = WRAP_BIND_FLAG | WRAP_BIND_KEY_FLAG; + if (partials.length) { + var holders = replaceHolders(partials, getHolder(bindKey)); + bitmask |= WRAP_PARTIAL_FLAG; + } + return createWrap(key, bitmask, object, partials, holders); + }); + + /** + * Creates a function that accepts arguments of `func` and either invokes + * `func` returning its result, if at least `arity` number of arguments have + * been provided, or returns a function that accepts the remaining `func` + * arguments, and so on. The arity of `func` may be specified if `func.length` + * is not sufficient. + * + * The `_.curry.placeholder` value, which defaults to `_` in monolithic builds, + * may be used as a placeholder for provided arguments. + * + * **Note:** This method doesn't set the "length" property of curried functions. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Function + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new curried function. + * @example + * + * var abc = function(a, b, c) { + * return [a, b, c]; + * }; + * + * var curried = _.curry(abc); + * + * curried(1)(2)(3); + * // => [1, 2, 3] + * + * curried(1, 2)(3); + * // => [1, 2, 3] + * + * curried(1, 2, 3); + * // => [1, 2, 3] + * + * // Curried with placeholders. + * curried(1)(_, 3)(2); + * // => [1, 2, 3] + */ + function curry(func, arity, guard) { + arity = guard ? undefined : arity; + var result = createWrap(func, WRAP_CURRY_FLAG, undefined, undefined, undefined, undefined, undefined, arity); + result.placeholder = curry.placeholder; + return result; + } + + /** + * This method is like `_.curry` except that arguments are applied to `func` + * in the manner of `_.partialRight` instead of `_.partial`. + * + * The `_.curryRight.placeholder` value, which defaults to `_` in monolithic + * builds, may be used as a placeholder for provided arguments. + * + * **Note:** This method doesn't set the "length" property of curried functions. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} func The function to curry. + * @param {number} [arity=func.length] The arity of `func`. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the new curried function. + * @example + * + * var abc = function(a, b, c) { + * return [a, b, c]; + * }; + * + * var curried = _.curryRight(abc); + * + * curried(3)(2)(1); + * // => [1, 2, 3] + * + * curried(2, 3)(1); + * // => [1, 2, 3] + * + * curried(1, 2, 3); + * // => [1, 2, 3] + * + * // Curried with placeholders. + * curried(3)(1, _)(2); + * // => [1, 2, 3] + */ + function curryRight(func, arity, guard) { + arity = guard ? undefined : arity; + var result = createWrap(func, WRAP_CURRY_RIGHT_FLAG, undefined, undefined, undefined, undefined, undefined, arity); + result.placeholder = curryRight.placeholder; + return result; + } + + /** + * Creates a debounced function that delays invoking `func` until after `wait` + * milliseconds have elapsed since the last time the debounced function was + * invoked. The debounced function comes with a `cancel` method to cancel + * delayed `func` invocations and a `flush` method to immediately invoke them. 
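+ *
+ * A hedged usage sketch (assuming some `save` handler, not part of the
+ * upstream docs): `cancel` drops a pending trailing invocation, while
+ * `flush` runs it immediately.
+ *
+ * var debouncedSave = _.debounce(save, 200);
+ * debouncedSave(); // schedules a trailing invocation
+ * debouncedSave.flush(); // invoke the pending call now
+ * debouncedSave.cancel(); // alternatively, drop a pending call
+ *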
+ * Provide `options` to indicate whether `func` should be invoked on the
+ * leading and/or trailing edge of the `wait` timeout. The `func` is invoked
+ * with the last arguments provided to the debounced function. Subsequent
+ * calls to the debounced function return the result of the last `func`
+ * invocation.
+ *
+ * **Note:** If `leading` and `trailing` options are `true`, `func` is
+ * invoked on the trailing edge of the timeout only if the debounced function
+ * is invoked more than once during the `wait` timeout.
+ *
+ * If `wait` is `0` and `leading` is `false`, `func` invocation is deferred
+ * until the next tick, similar to `setTimeout` with a timeout of `0`.
+ *
+ * See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/)
+ * for details on the differences between `_.debounce` and `_.throttle`.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Function
+ * @param {Function} func The function to debounce.
+ * @param {number} [wait=0] The number of milliseconds to delay.
+ * @param {Object} [options={}] The options object.
+ * @param {boolean} [options.leading=false]
+ * Specify invoking on the leading edge of the timeout.
+ * @param {number} [options.maxWait]
+ * The maximum time `func` is allowed to be delayed before it's invoked.
+ * @param {boolean} [options.trailing=true]
+ * Specify invoking on the trailing edge of the timeout.
+ * @returns {Function} Returns the new debounced function.
+ * @example
+ *
+ * // Avoid costly calculations while the window size is in flux.
+ * jQuery(window).on('resize', _.debounce(calculateLayout, 150));
+ *
+ * // Invoke `sendMail` when clicked, debouncing subsequent calls.
+ * jQuery(element).on('click', _.debounce(sendMail, 300, {
+ * 'leading': true,
+ * 'trailing': false
+ * }));
+ *
+ * // Ensure `batchLog` is invoked once after 1 second of debounced calls.
+ * var debounced = _.debounce(batchLog, 250, { 'maxWait': 1000 });
+ * var source = new EventSource('/stream');
+ * jQuery(source).on('message', debounced);
+ *
+ * // Cancel the trailing debounced invocation.
+ * jQuery(window).on('popstate', debounced.cancel);
+ */
+ function debounce(func, wait, options) {
+ var lastArgs,
+ lastThis,
+ maxWait,
+ result,
+ timerId,
+ lastCallTime,
+ lastInvokeTime = 0,
+ leading = false,
+ maxing = false,
+ trailing = true;
+
+ if (typeof func != 'function') {
+ throw new TypeError(FUNC_ERROR_TEXT);
+ }
+ wait = toNumber(wait) || 0;
+ if (isObject(options)) {
+ leading = !!options.leading;
+ maxing = 'maxWait' in options;
+ maxWait = maxing ? nativeMax(toNumber(options.maxWait) || 0, wait) : maxWait;
+ trailing = 'trailing' in options ? !!options.trailing : trailing;
+ }
+
+ function invokeFunc(time) {
+ var args = lastArgs,
+ thisArg = lastThis;
+
+ lastArgs = lastThis = undefined;
+ lastInvokeTime = time;
+ result = func.apply(thisArg, args);
+ return result;
+ }
+
+ function leadingEdge(time) {
+ // Reset any `maxWait` timer.
+ lastInvokeTime = time;
+ // Start the timer for the trailing edge.
+ timerId = setTimeout(timerExpired, wait);
+ // Invoke the leading edge.
+ return leading ? invokeFunc(time) : result;
+ }
+
+ function remainingWait(time) {
+ var timeSinceLastCall = time - lastCallTime,
+ timeSinceLastInvoke = time - lastInvokeTime,
+ timeWaiting = wait - timeSinceLastCall;
+
+ return maxing
+ ? nativeMin(timeWaiting, maxWait - timeSinceLastInvoke)
+ : timeWaiting;
+ }
+
+ function shouldInvoke(time) {
+ var timeSinceLastCall = time - lastCallTime,
+ timeSinceLastInvoke = time - lastInvokeTime;
+
+ // Either this is the first call, activity has stopped and we're at the
+ // trailing edge, the system time has gone backwards and we're treating
+ // it as the trailing edge, or we've hit the `maxWait` limit.
+ return (lastCallTime === undefined || (timeSinceLastCall >= wait) ||
+ (timeSinceLastCall < 0) || (maxing && timeSinceLastInvoke >= maxWait));
+ }
+
+ function timerExpired() {
+ var time = now();
+ if (shouldInvoke(time)) {
+ return trailingEdge(time);
+ }
+ // Restart the timer.
+ timerId = setTimeout(timerExpired, remainingWait(time));
+ }
+
+ function trailingEdge(time) {
+ timerId = undefined;
+
+ // Only invoke if we have `lastArgs` which means `func` has been
+ // debounced at least once.
+ if (trailing && lastArgs) {
+ return invokeFunc(time);
+ }
+ lastArgs = lastThis = undefined;
+ return result;
+ }
+
+ function cancel() {
+ if (timerId !== undefined) {
+ clearTimeout(timerId);
+ }
+ lastInvokeTime = 0;
+ lastArgs = lastCallTime = lastThis = timerId = undefined;
+ }
+
+ function flush() {
+ return timerId === undefined ? result : trailingEdge(now());
+ }
+
+ function debounced() {
+ var time = now(),
+ isInvoking = shouldInvoke(time);
+
+ lastArgs = arguments;
+ lastThis = this;
+ lastCallTime = time;
+
+ if (isInvoking) {
+ if (timerId === undefined) {
+ return leadingEdge(lastCallTime);
+ }
+ if (maxing) {
+ // Handle invocations in a tight loop.
+ clearTimeout(timerId);
+ timerId = setTimeout(timerExpired, wait);
+ return invokeFunc(lastCallTime);
+ }
+ }
+ if (timerId === undefined) {
+ timerId = setTimeout(timerExpired, wait);
+ }
+ return result;
+ }
+ debounced.cancel = cancel;
+ debounced.flush = flush;
+ return debounced;
+ }
+
+ /**
+ * Defers invoking the `func` until the current call stack has cleared. Any
+ * additional arguments are provided to `func` when it's invoked.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Function
+ * @param {Function} func The function to defer.
+ * @param {...*} [args] The arguments to invoke `func` with.
+ * @returns {number} Returns the timer id.
+ * @example
+ *
+ * _.defer(function(text) {
+ * console.log(text);
+ * }, 'deferred');
+ * // => Logs 'deferred' after one millisecond.
+ */
+ var defer = baseRest(function(func, args) {
+ return baseDelay(func, 1, args);
+ });
+
+ /**
+ * Invokes `func` after `wait` milliseconds. Any additional arguments are
+ * provided to `func` when it's invoked.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Function
+ * @param {Function} func The function to delay.
+ * @param {number} wait The number of milliseconds to delay invocation.
+ * @param {...*} [args] The arguments to invoke `func` with.
+ * @returns {number} Returns the timer id.
+ * @example
+ *
+ * _.delay(function(text) {
+ * console.log(text);
+ * }, 1000, 'later');
+ * // => Logs 'later' after one second.
+ */
+ var delay = baseRest(function(func, wait, args) {
+ return baseDelay(func, toNumber(wait) || 0, args);
+ });
+
+ /**
+ * Creates a function that invokes `func` with arguments reversed.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Function
+ * @param {Function} func The function to flip arguments for.
+ * @returns {Function} Returns the new flipped function.
+ * @example + * + * var flipped = _.flip(function() { + * return _.toArray(arguments); + * }); + * + * flipped('a', 'b', 'c', 'd'); + * // => ['d', 'c', 'b', 'a'] + */ + function flip(func) { + return createWrap(func, WRAP_FLIP_FLAG); + } + + /** + * Creates a function that memoizes the result of `func`. If `resolver` is + * provided, it determines the cache key for storing the result based on the + * arguments provided to the memoized function. By default, the first argument + * provided to the memoized function is used as the map cache key. The `func` + * is invoked with the `this` binding of the memoized function. + * + * **Note:** The cache is exposed as the `cache` property on the memoized + * function. Its creation may be customized by replacing the `_.memoize.Cache` + * constructor with one whose instances implement the + * [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object) + * method interface of `clear`, `delete`, `get`, `has`, and `set`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Function + * @param {Function} func The function to have its output memoized. + * @param {Function} [resolver] The function to resolve the cache key. + * @returns {Function} Returns the new memoized function. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * var other = { 'c': 3, 'd': 4 }; + * + * var values = _.memoize(_.values); + * values(object); + * // => [1, 2] + * + * values(other); + * // => [3, 4] + * + * object.a = 2; + * values(object); + * // => [1, 2] + * + * // Modify the result cache. + * values.cache.set(object, ['a', 'b']); + * values(object); + * // => ['a', 'b'] + * + * // Replace `_.memoize.Cache`. + * _.memoize.Cache = WeakMap; + */ + function memoize(func, resolver) { + if (typeof func != 'function' || (resolver != null && typeof resolver != 'function')) { + throw new TypeError(FUNC_ERROR_TEXT); + } + var memoized = function() { + var args = arguments, + key = resolver ? resolver.apply(this, args) : args[0], + cache = memoized.cache; + + if (cache.has(key)) { + return cache.get(key); + } + var result = func.apply(this, args); + memoized.cache = cache.set(key, result) || cache; + return result; + }; + memoized.cache = new (memoize.Cache || MapCache); + return memoized; + } + + // Expose `MapCache`. + memoize.Cache = MapCache; + + /** + * Creates a function that negates the result of the predicate `func`. The + * `func` predicate is invoked with the `this` binding and arguments of the + * created function. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Function + * @param {Function} predicate The predicate to negate. + * @returns {Function} Returns the new negated function. + * @example + * + * function isEven(n) { + * return n % 2 == 0; + * } + * + * _.filter([1, 2, 3, 4, 5, 6], _.negate(isEven)); + * // => [1, 3, 5] + */ + function negate(predicate) { + if (typeof predicate != 'function') { + throw new TypeError(FUNC_ERROR_TEXT); + } + return function() { + var args = arguments; + switch (args.length) { + case 0: return !predicate.call(this); + case 1: return !predicate.call(this, args[0]); + case 2: return !predicate.call(this, args[0], args[1]); + case 3: return !predicate.call(this, args[0], args[1], args[2]); + } + return !predicate.apply(this, args); + }; + } + + /** + * Creates a function that is restricted to invoking `func` once. Repeat calls + * to the function return the value of the first invocation. 
+ * The `func` is
+ * invoked with the `this` binding and arguments of the created function.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Function
+ * @param {Function} func The function to restrict.
+ * @returns {Function} Returns the new restricted function.
+ * @example
+ *
+ * var initialize = _.once(createApplication);
+ * initialize();
+ * initialize();
+ * // => `createApplication` is invoked once
+ */
+ function once(func) {
+ return before(2, func);
+ }
+
+ /**
+ * Creates a function that invokes `func` with its arguments transformed.
+ *
+ * @static
+ * @since 4.0.0
+ * @memberOf _
+ * @category Function
+ * @param {Function} func The function to wrap.
+ * @param {...(Function|Function[])} [transforms=[_.identity]]
+ * The argument transforms.
+ * @returns {Function} Returns the new function.
+ * @example
+ *
+ * function doubled(n) {
+ * return n * 2;
+ * }
+ *
+ * function square(n) {
+ * return n * n;
+ * }
+ *
+ * var func = _.overArgs(function(x, y) {
+ * return [x, y];
+ * }, [square, doubled]);
+ *
+ * func(9, 3);
+ * // => [81, 6]
+ *
+ * func(10, 5);
+ * // => [100, 10]
+ */
+ var overArgs = castRest(function(func, transforms) {
+ transforms = (transforms.length == 1 && isArray(transforms[0]))
+ ? arrayMap(transforms[0], baseUnary(getIteratee()))
+ : arrayMap(baseFlatten(transforms, 1), baseUnary(getIteratee()));
+
+ var funcsLength = transforms.length;
+ return baseRest(function(args) {
+ var index = -1,
+ length = nativeMin(args.length, funcsLength);
+
+ while (++index < length) {
+ args[index] = transforms[index].call(this, args[index]);
+ }
+ return apply(func, this, args);
+ });
+ });
+
+ /**
+ * Creates a function that invokes `func` with `partials` prepended to the
+ * arguments it receives. This method is like `_.bind` except it does **not**
+ * alter the `this` binding.
+ *
+ * The `_.partial.placeholder` value, which defaults to `_` in monolithic
+ * builds, may be used as a placeholder for partially applied arguments.
+ *
+ * **Note:** This method doesn't set the "length" property of partially
+ * applied functions.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.2.0
+ * @category Function
+ * @param {Function} func The function to partially apply arguments to.
+ * @param {...*} [partials] The arguments to be partially applied.
+ * @returns {Function} Returns the new partially applied function.
+ * @example
+ *
+ * function greet(greeting, name) {
+ * return greeting + ' ' + name;
+ * }
+ *
+ * var sayHelloTo = _.partial(greet, 'hello');
+ * sayHelloTo('fred');
+ * // => 'hello fred'
+ *
+ * // Partially applied with placeholders.
+ * var greetFred = _.partial(greet, _, 'fred');
+ * greetFred('hi');
+ * // => 'hi fred'
+ */
+ var partial = baseRest(function(func, partials) {
+ var holders = replaceHolders(partials, getHolder(partial));
+ return createWrap(func, WRAP_PARTIAL_FLAG, undefined, partials, holders);
+ });
+
+ /**
+ * This method is like `_.partial` except that partially applied arguments
+ * are appended to the arguments it receives.
+ *
+ * The `_.partialRight.placeholder` value, which defaults to `_` in monolithic
+ * builds, may be used as a placeholder for partially applied arguments.
+ *
+ * **Note:** This method doesn't set the "length" property of partially
+ * applied functions.
+ *
+ * @static
+ * @memberOf _
+ * @since 1.0.0
+ * @category Function
+ * @param {Function} func The function to partially apply arguments to.
+ * @param {...*} [partials] The arguments to be partially applied.
+ * @returns {Function} Returns the new partially applied function.
+ * @example
+ *
+ * function greet(greeting, name) {
+ * return greeting + ' ' + name;
+ * }
+ *
+ * var greetFred = _.partialRight(greet, 'fred');
+ * greetFred('hi');
+ * // => 'hi fred'
+ *
+ * // Partially applied with placeholders.
+ * var sayHelloTo = _.partialRight(greet, 'hello', _);
+ * sayHelloTo('fred');
+ * // => 'hello fred'
+ */
+ var partialRight = baseRest(function(func, partials) {
+ var holders = replaceHolders(partials, getHolder(partialRight));
+ return createWrap(func, WRAP_PARTIAL_RIGHT_FLAG, undefined, partials, holders);
+ });
+
+ /**
+ * Creates a function that invokes `func` with arguments arranged according
+ * to the specified `indexes` where the argument value at the first index is
+ * provided as the first argument, the argument value at the second index is
+ * provided as the second argument, and so on.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.0.0
+ * @category Function
+ * @param {Function} func The function to rearrange arguments for.
+ * @param {...(number|number[])} indexes The arranged argument indexes.
+ * @returns {Function} Returns the new function.
+ * @example
+ *
+ * var rearged = _.rearg(function(a, b, c) {
+ * return [a, b, c];
+ * }, [2, 0, 1]);
+ *
+ * rearged('b', 'c', 'a')
+ * // => ['a', 'b', 'c']
+ */
+ var rearg = flatRest(function(func, indexes) {
+ return createWrap(func, WRAP_REARG_FLAG, undefined, undefined, undefined, indexes);
+ });
+
+ /**
+ * Creates a function that invokes `func` with the `this` binding of the
+ * created function and arguments from `start` and beyond provided as
+ * an array.
+ *
+ * **Note:** This method is based on the
+ * [rest parameter](https://mdn.io/rest_parameters).
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Function
+ * @param {Function} func The function to apply a rest parameter to.
+ * @param {number} [start=func.length-1] The start position of the rest parameter.
+ * @returns {Function} Returns the new function.
+ * @example
+ *
+ * var say = _.rest(function(what, names) {
+ * return what + ' ' + _.initial(names).join(', ') +
+ * (_.size(names) > 1 ? ', & ' : '') + _.last(names);
+ * });
+ *
+ * say('hello', 'fred', 'barney', 'pebbles');
+ * // => 'hello fred, barney, & pebbles'
+ */
+ function rest(func, start) {
+ if (typeof func != 'function') {
+ throw new TypeError(FUNC_ERROR_TEXT);
+ }
+ start = start === undefined ? start : toInteger(start);
+ return baseRest(func, start);
+ }
+
+ /**
+ * Creates a function that invokes `func` with the `this` binding of the
+ * created function and an array of arguments much like
+ * [`Function#apply`](http://www.ecma-international.org/ecma-262/7.0/#sec-function.prototype.apply).
+ *
+ * **Note:** This method is based on the
+ * [spread operator](https://mdn.io/spread_operator).
+ *
+ * @static
+ * @memberOf _
+ * @since 3.2.0
+ * @category Function
+ * @param {Function} func The function to spread arguments over.
+ * @param {number} [start=0] The start position of the spread.
+ * @returns {Function} Returns the new function.
+ * @example
+ *
+ * var say = _.spread(function(who, what) {
+ * return who + ' says ' + what;
+ * });
+ *
+ * say(['fred', 'hello']);
+ * // => 'fred says hello'
+ *
+ * var numbers = Promise.all([
+ * Promise.resolve(40),
+ * Promise.resolve(36)
+ * ]);
+ *
+ * numbers.then(_.spread(function(x, y) {
+ * return x + y;
+ * }));
+ * // => a Promise of 76
+ */
+ function spread(func, start) {
+ if (typeof func != 'function') {
+ throw new TypeError(FUNC_ERROR_TEXT);
+ }
+ start = start == null ? 0 : nativeMax(toInteger(start), 0);
+ return baseRest(function(args) {
+ var array = args[start],
+ otherArgs = castSlice(args, 0, start);
+
+ if (array) {
+ arrayPush(otherArgs, array);
+ }
+ return apply(func, this, otherArgs);
+ });
+ }
+
+ /**
+ * Creates a throttled function that invokes `func` at most once every
+ * `wait` milliseconds. The throttled function comes with a `cancel`
+ * method to cancel delayed `func` invocations and a `flush` method to
+ * immediately invoke them. Provide `options` to indicate whether `func`
+ * should be invoked on the leading and/or trailing edge of the `wait`
+ * timeout. The `func` is invoked with the last arguments provided to the
+ * throttled function. Subsequent calls to the throttled function return the
+ * result of the last `func` invocation.
+ *
+ * **Note:** If `leading` and `trailing` options are `true`, `func` is
+ * invoked on the trailing edge of the timeout only if the throttled function
+ * is invoked more than once during the `wait` timeout.
+ *
+ * If `wait` is `0` and `leading` is `false`, `func` invocation is deferred
+ * until the next tick, similar to `setTimeout` with a timeout of `0`.
+ *
+ * See [David Corbacho's article](https://css-tricks.com/debouncing-throttling-explained-examples/)
+ * for details on the differences between `_.throttle` and `_.debounce`.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Function
+ * @param {Function} func The function to throttle.
+ * @param {number} [wait=0] The number of milliseconds to throttle invocations to.
+ * @param {Object} [options={}] The options object.
+ * @param {boolean} [options.leading=true]
+ * Specify invoking on the leading edge of the timeout.
+ * @param {boolean} [options.trailing=true]
+ * Specify invoking on the trailing edge of the timeout.
+ * @returns {Function} Returns the new throttled function.
+ * @example
+ *
+ * // Avoid excessively updating the position while scrolling.
+ * jQuery(window).on('scroll', _.throttle(updatePosition, 100));
+ *
+ * // Invoke `renewToken` when the click event is fired, but not more than once every 5 minutes.
+ * var throttled = _.throttle(renewToken, 300000, { 'trailing': false });
+ * jQuery(element).on('click', throttled);
+ *
+ * // Cancel the trailing throttled invocation.
+ * jQuery(window).on('popstate', throttled.cancel);
+ */
+ function throttle(func, wait, options) {
+ var leading = true,
+ trailing = true;
+
+ if (typeof func != 'function') {
+ throw new TypeError(FUNC_ERROR_TEXT);
+ }
+ if (isObject(options)) {
+ leading = 'leading' in options ? !!options.leading : leading;
+ trailing = 'trailing' in options ? !!options.trailing : trailing;
+ }
+ return debounce(func, wait, {
+ 'leading': leading,
+ 'maxWait': wait,
+ 'trailing': trailing
+ });
+ }
+
+ /**
+ * Creates a function that accepts up to one argument, ignoring any
+ * additional arguments.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Function
+ * @param {Function} func The function to cap arguments for.
+ * @returns {Function} Returns the new capped function.
+ * @example
+ *
+ * _.map(['6', '8', '10'], _.unary(parseInt));
+ * // => [6, 8, 10]
+ */
+ function unary(func) {
+ return ary(func, 1);
+ }
+
+ /**
+ * Creates a function that provides `value` to `wrapper` as its first
+ * argument. Any additional arguments provided to the function are appended
+ * to those provided to the `wrapper`. The wrapper is invoked with the `this`
+ * binding of the created function.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Function
+ * @param {*} value The value to wrap.
+ * @param {Function} [wrapper=identity] The wrapper function.
+ * @returns {Function} Returns the new function.
+ * @example
+ *
+ * var p = _.wrap(_.escape, function(func, text) {
+ * return '<p>' + func(text) + '</p>';
+ * });
+ *
+ * p('fred, barney, & pebbles');
+ * // => '<p>fred, barney, &amp; pebbles</p>'
+ */
+ function wrap(value, wrapper) {
+ return partial(castFunction(wrapper), value);
+ }
+
+ /*------------------------------------------------------------------------*/
+
+ /**
+ * Casts `value` as an array if it's not one.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.4.0
+ * @category Lang
+ * @param {*} value The value to inspect.
+ * @returns {Array} Returns the cast array.
+ * @example
+ *
+ * _.castArray(1);
+ * // => [1]
+ *
+ * _.castArray({ 'a': 1 });
+ * // => [{ 'a': 1 }]
+ *
+ * _.castArray('abc');
+ * // => ['abc']
+ *
+ * _.castArray(null);
+ * // => [null]
+ *
+ * _.castArray(undefined);
+ * // => [undefined]
+ *
+ * _.castArray();
+ * // => []
+ *
+ * var array = [1, 2, 3];
+ * console.log(_.castArray(array) === array);
+ * // => true
+ */
+ function castArray() {
+ if (!arguments.length) {
+ return [];
+ }
+ var value = arguments[0];
+ return isArray(value) ? value : [value];
+ }
+
+ /**
+ * Creates a shallow clone of `value`.
+ *
+ * **Note:** This method is loosely based on the
+ * [structured clone algorithm](https://mdn.io/Structured_clone_algorithm)
+ * and supports cloning arrays, array buffers, booleans, date objects, maps,
+ * numbers, `Object` objects, regexes, sets, strings, symbols, and typed
+ * arrays. The own enumerable properties of `arguments` objects are cloned
+ * as plain objects. An empty object is returned for uncloneable values such
+ * as error objects, functions, DOM nodes, and WeakMaps.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to clone.
+ * @returns {*} Returns the cloned value.
+ * @see _.cloneDeep
+ * @example
+ *
+ * var objects = [{ 'a': 1 }, { 'b': 2 }];
+ *
+ * var shallow = _.clone(objects);
+ * console.log(shallow[0] === objects[0]);
+ * // => true
+ */
+ function clone(value) {
+ return baseClone(value, CLONE_SYMBOLS_FLAG);
+ }
+
+ /**
+ * This method is like `_.clone` except that it accepts `customizer` which
+ * is invoked to produce the cloned value. If `customizer` returns `undefined`,
+ * cloning is handled by the method instead. The `customizer` is invoked with
+ * up to four arguments; (value [, index|key, object, stack]).
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to clone.
+ * @param {Function} [customizer] The function to customize cloning.
+ * @returns {*} Returns the cloned value.
+ * @see _.cloneDeepWith
+ * @example
+ *
+ * function customizer(value) {
+ * if (_.isElement(value)) {
+ * return value.cloneNode(false);
+ * }
+ * }
+ *
+ * var el = _.cloneWith(document.body, customizer);
+ *
+ * console.log(el === document.body);
+ * // => false
+ * console.log(el.nodeName);
+ * // => 'BODY'
+ * console.log(el.childNodes.length);
+ * // => 0
+ */
+ function cloneWith(value, customizer) {
+ customizer = typeof customizer == 'function' ? customizer : undefined;
+ return baseClone(value, CLONE_SYMBOLS_FLAG, customizer);
+ }
+
+ /**
+ * This method is like `_.clone` except that it recursively clones `value`.
+ *
+ * @static
+ * @memberOf _
+ * @since 1.0.0
+ * @category Lang
+ * @param {*} value The value to recursively clone.
+ * @returns {*} Returns the deep cloned value.
+ * @see _.clone + * @example + * + * var objects = [{ 'a': 1 }, { 'b': 2 }]; + * + * var deep = _.cloneDeep(objects); + * console.log(deep[0] === objects[0]); + * // => false + */ + function cloneDeep(value) { + return baseClone(value, CLONE_DEEP_FLAG | CLONE_SYMBOLS_FLAG); + } + + /** + * This method is like `_.cloneWith` except that it recursively clones `value`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to recursively clone. + * @param {Function} [customizer] The function to customize cloning. + * @returns {*} Returns the deep cloned value. + * @see _.cloneWith + * @example + * + * function customizer(value) { + * if (_.isElement(value)) { + * return value.cloneNode(true); + * } + * } + * + * var el = _.cloneDeepWith(document.body, customizer); + * + * console.log(el === document.body); + * // => false + * console.log(el.nodeName); + * // => 'BODY' + * console.log(el.childNodes.length); + * // => 20 + */ + function cloneDeepWith(value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return baseClone(value, CLONE_DEEP_FLAG | CLONE_SYMBOLS_FLAG, customizer); + } + + /** + * Checks if `object` conforms to `source` by invoking the predicate + * properties of `source` with the corresponding property values of `object`. + * + * **Note:** This method is equivalent to `_.conforms` when `source` is + * partially applied. + * + * @static + * @memberOf _ + * @since 4.14.0 + * @category Lang + * @param {Object} object The object to inspect. + * @param {Object} source The object of property predicates to conform to. + * @returns {boolean} Returns `true` if `object` conforms, else `false`. + * @example + * + * var object = { 'a': 1, 'b': 2 }; + * + * _.conformsTo(object, { 'b': function(n) { return n > 1; } }); + * // => true + * + * _.conformsTo(object, { 'b': function(n) { return n > 2; } }); + * // => false + */ + function conformsTo(object, source) { + return source == null || baseConformsTo(object, source, keys(source)); + } + + /** + * Performs a + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * comparison between two values to determine if they are equivalent. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.eq(object, object); + * // => true + * + * _.eq(object, other); + * // => false + * + * _.eq('a', 'a'); + * // => true + * + * _.eq('a', Object('a')); + * // => false + * + * _.eq(NaN, NaN); + * // => true + */ + function eq(value, other) { + return value === other || (value !== value && other !== other); + } + + /** + * Checks if `value` is greater than `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than `other`, + * else `false`. + * @see _.lt + * @example + * + * _.gt(3, 1); + * // => true + * + * _.gt(3, 3); + * // => false + * + * _.gt(1, 3); + * // => false + */ + var gt = createRelationalOperation(baseGt); + + /** + * Checks if `value` is greater than or equal to `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. 
+ * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is greater than or equal to + * `other`, else `false`. + * @see _.lte + * @example + * + * _.gte(3, 1); + * // => true + * + * _.gte(3, 3); + * // => true + * + * _.gte(1, 3); + * // => false + */ + var gte = createRelationalOperation(function(value, other) { + return value >= other; + }); + + /** + * Checks if `value` is likely an `arguments` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an `arguments` object, + * else `false`. + * @example + * + * _.isArguments(function() { return arguments; }()); + * // => true + * + * _.isArguments([1, 2, 3]); + * // => false + */ + var isArguments = baseIsArguments(function() { return arguments; }()) ? baseIsArguments : function(value) { + return isObjectLike(value) && hasOwnProperty.call(value, 'callee') && + !propertyIsEnumerable.call(value, 'callee'); + }; + + /** + * Checks if `value` is classified as an `Array` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array, else `false`. + * @example + * + * _.isArray([1, 2, 3]); + * // => true + * + * _.isArray(document.body.children); + * // => false + * + * _.isArray('abc'); + * // => false + * + * _.isArray(_.noop); + * // => false + */ + var isArray = Array.isArray; + + /** + * Checks if `value` is classified as an `ArrayBuffer` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array buffer, else `false`. + * @example + * + * _.isArrayBuffer(new ArrayBuffer(2)); + * // => true + * + * _.isArrayBuffer(new Array(2)); + * // => false + */ + var isArrayBuffer = nodeIsArrayBuffer ? baseUnary(nodeIsArrayBuffer) : baseIsArrayBuffer; + + /** + * Checks if `value` is array-like. A value is considered array-like if it's + * not a function and has a `value.length` that's an integer greater than or + * equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is array-like, else `false`. + * @example + * + * _.isArrayLike([1, 2, 3]); + * // => true + * + * _.isArrayLike(document.body.children); + * // => true + * + * _.isArrayLike('abc'); + * // => true + * + * _.isArrayLike(_.noop); + * // => false + */ + function isArrayLike(value) { + return value != null && isLength(value.length) && !isFunction(value); + } + + /** + * This method is like `_.isArrayLike` except that it also checks if `value` + * is an object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an array-like object, + * else `false`. + * @example + * + * _.isArrayLikeObject([1, 2, 3]); + * // => true + * + * _.isArrayLikeObject(document.body.children); + * // => true + * + * _.isArrayLikeObject('abc'); + * // => false + * + * _.isArrayLikeObject(_.noop); + * // => false + */ + function isArrayLikeObject(value) { + return isObjectLike(value) && isArrayLike(value); + } + + /** + * Checks if `value` is classified as a boolean primitive or object. 
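+ *
+ * An illustrative aside (beyond the upstream examples): boxed booleans are
+ * object-like with a `[object Boolean]` tag, so they are detected as well.
+ *
+ * _.isBoolean(new Boolean(false));
+ * // => true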
+ * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a boolean, else `false`. + * @example + * + * _.isBoolean(false); + * // => true + * + * _.isBoolean(null); + * // => false + */ + function isBoolean(value) { + return value === true || value === false || + (isObjectLike(value) && baseGetTag(value) == boolTag); + } + + /** + * Checks if `value` is a buffer. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a buffer, else `false`. + * @example + * + * _.isBuffer(new Buffer(2)); + * // => true + * + * _.isBuffer(new Uint8Array(2)); + * // => false + */ + var isBuffer = nativeIsBuffer || stubFalse; + + /** + * Checks if `value` is classified as a `Date` object. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a date object, else `false`. + * @example + * + * _.isDate(new Date); + * // => true + * + * _.isDate('Mon April 23 2012'); + * // => false + */ + var isDate = nodeIsDate ? baseUnary(nodeIsDate) : baseIsDate; + + /** + * Checks if `value` is likely a DOM element. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a DOM element, else `false`. + * @example + * + * _.isElement(document.body); + * // => true + * + * _.isElement(''); + * // => false + */ + function isElement(value) { + return isObjectLike(value) && value.nodeType === 1 && !isPlainObject(value); + } + + /** + * Checks if `value` is an empty object, collection, map, or set. + * + * Objects are considered empty if they have no own enumerable string keyed + * properties. + * + * Array-like values such as `arguments` objects, arrays, buffers, strings, or + * jQuery-like collections are considered empty if they have a `length` of `0`. + * Similarly, maps and sets are considered empty if they have a `size` of `0`. + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is empty, else `false`. + * @example + * + * _.isEmpty(null); + * // => true + * + * _.isEmpty(true); + * // => true + * + * _.isEmpty(1); + * // => true + * + * _.isEmpty([1, 2, 3]); + * // => false + * + * _.isEmpty({ 'a': 1 }); + * // => false + */ + function isEmpty(value) { + if (value == null) { + return true; + } + if (isArrayLike(value) && + (isArray(value) || typeof value == 'string' || typeof value.splice == 'function' || + isBuffer(value) || isTypedArray(value) || isArguments(value))) { + return !value.length; + } + var tag = getTag(value); + if (tag == mapTag || tag == setTag) { + return !value.size; + } + if (isPrototype(value)) { + return !baseKeys(value).length; + } + for (var key in value) { + if (hasOwnProperty.call(value, key)) { + return false; + } + } + return true; + } + + /** + * Performs a deep comparison between two values to determine if they are + * equivalent. + * + * **Note:** This method supports comparing arrays, array buffers, booleans, + * date objects, error objects, maps, numbers, `Object` objects, regexes, + * sets, strings, symbols, and typed arrays. `Object` objects are compared + * by their own, not inherited, enumerable properties. 
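+ * For instance (a hedged sketch, not from the upstream docs), inherited
+ * properties do not participate in the comparison:
+ *
+ * _.isEqual(Object.create({ 'a': 1 }), {});
+ * // => true
+ *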
+ * Functions and DOM
+ * nodes are compared by strict equality, i.e. `===`.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to compare.
+ * @param {*} other The other value to compare.
+ * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
+ * @example
+ *
+ * var object = { 'a': 1 };
+ * var other = { 'a': 1 };
+ *
+ * _.isEqual(object, other);
+ * // => true
+ *
+ * object === other;
+ * // => false
+ */
+ function isEqual(value, other) {
+ return baseIsEqual(value, other);
+ }
+
+ /**
+ * This method is like `_.isEqual` except that it accepts `customizer` which
+ * is invoked to compare values. If `customizer` returns `undefined`, comparisons
+ * are handled by the method instead. The `customizer` is invoked with up to
+ * six arguments: (objValue, othValue [, index|key, object, other, stack]).
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to compare.
+ * @param {*} other The other value to compare.
+ * @param {Function} [customizer] The function to customize comparisons.
+ * @returns {boolean} Returns `true` if the values are equivalent, else `false`.
+ * @example
+ *
+ * function isGreeting(value) {
+ * return /^h(?:i|ello)$/.test(value);
+ * }
+ *
+ * function customizer(objValue, othValue) {
+ * if (isGreeting(objValue) && isGreeting(othValue)) {
+ * return true;
+ * }
+ * }
+ *
+ * var array = ['hello', 'goodbye'];
+ * var other = ['hi', 'goodbye'];
+ *
+ * _.isEqualWith(array, other, customizer);
+ * // => true
+ */
+ function isEqualWith(value, other, customizer) {
+ customizer = typeof customizer == 'function' ? customizer : undefined;
+ var result = customizer ? customizer(value, other) : undefined;
+ return result === undefined ? baseIsEqual(value, other, undefined, customizer) : !!result;
+ }
+
+ /**
+ * Checks if `value` is an `Error`, `EvalError`, `RangeError`, `ReferenceError`,
+ * `SyntaxError`, `TypeError`, or `URIError` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is an error object, else `false`.
+ * @example
+ *
+ * _.isError(new Error);
+ * // => true
+ *
+ * _.isError(Error);
+ * // => false
+ */
+ function isError(value) {
+ if (!isObjectLike(value)) {
+ return false;
+ }
+ var tag = baseGetTag(value);
+ return tag == errorTag || tag == domExcTag ||
+ (typeof value.message == 'string' && typeof value.name == 'string' && !isPlainObject(value));
+ }
+
+ /**
+ * Checks if `value` is a finite primitive number.
+ *
+ * **Note:** This method is based on
+ * [`Number.isFinite`](https://mdn.io/Number/isFinite).
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a finite number, else `false`.
+ * @example
+ *
+ * _.isFinite(3);
+ * // => true
+ *
+ * _.isFinite(Number.MIN_VALUE);
+ * // => true
+ *
+ * _.isFinite(Infinity);
+ * // => false
+ *
+ * _.isFinite('3');
+ * // => false
+ */
+ function isFinite(value) {
+ return typeof value == 'number' && nativeIsFinite(value);
+ }
+
+ /**
+ * Checks if `value` is classified as a `Function` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a function, else `false`.
+ * @example + * + * _.isFunction(_); + * // => true + * + * _.isFunction(/abc/); + * // => false + */ + function isFunction(value) { + if (!isObject(value)) { + return false; + } + // The use of `Object#toString` avoids issues with the `typeof` operator + // in Safari 9 which returns 'object' for typed arrays and other constructors. + var tag = baseGetTag(value); + return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag; + } + + /** + * Checks if `value` is an integer. + * + * **Note:** This method is based on + * [`Number.isInteger`](https://mdn.io/Number/isInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an integer, else `false`. + * @example + * + * _.isInteger(3); + * // => true + * + * _.isInteger(Number.MIN_VALUE); + * // => false + * + * _.isInteger(Infinity); + * // => false + * + * _.isInteger('3'); + * // => false + */ + function isInteger(value) { + return typeof value == 'number' && value == toInteger(value); + } + + /** + * Checks if `value` is a valid array-like length. + * + * **Note:** This method is loosely based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a valid length, else `false`. + * @example + * + * _.isLength(3); + * // => true + * + * _.isLength(Number.MIN_VALUE); + * // => false + * + * _.isLength(Infinity); + * // => false + * + * _.isLength('3'); + * // => false + */ + function isLength(value) { + return typeof value == 'number' && + value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER; + } + + /** + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) + * + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false + */ + function isObject(value) { + var type = typeof value; + return value != null && (type == 'object' || type == 'function'); + } + + /** + * Checks if `value` is object-like. A value is object-like if it's not `null` + * and has a `typeof` result of "object". + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is object-like, else `false`. + * @example + * + * _.isObjectLike({}); + * // => true + * + * _.isObjectLike([1, 2, 3]); + * // => true + * + * _.isObjectLike(_.noop); + * // => false + * + * _.isObjectLike(null); + * // => false + */ + function isObjectLike(value) { + return value != null && typeof value == 'object'; + } + + /** + * Checks if `value` is classified as a `Map` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a map, else `false`. + * @example + * + * _.isMap(new Map); + * // => true + * + * _.isMap(new WeakMap); + * // => false + */ + var isMap = nodeIsMap ? 
+ baseUnary(nodeIsMap) : baseIsMap;
+
+ /**
+ * Performs a partial deep comparison between `object` and `source` to
+ * determine if `object` contains equivalent property values.
+ *
+ * **Note:** This method is equivalent to `_.matches` when `source` is
+ * partially applied.
+ *
+ * Partial comparisons will match empty array and empty object `source`
+ * values against any array or object value, respectively. See `_.isEqual`
+ * for a list of supported value comparisons.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.0.0
+ * @category Lang
+ * @param {Object} object The object to inspect.
+ * @param {Object} source The object of property values to match.
+ * @returns {boolean} Returns `true` if `object` is a match, else `false`.
+ * @example
+ *
+ * var object = { 'a': 1, 'b': 2 };
+ *
+ * _.isMatch(object, { 'b': 2 });
+ * // => true
+ *
+ * _.isMatch(object, { 'b': 1 });
+ * // => false
+ */
+ function isMatch(object, source) {
+ return object === source || baseIsMatch(object, source, getMatchData(source));
+ }
+
+ /**
+ * This method is like `_.isMatch` except that it accepts `customizer` which
+ * is invoked to compare values. If `customizer` returns `undefined`, comparisons
+ * are handled by the method instead. The `customizer` is invoked with five
+ * arguments: (objValue, srcValue, index|key, object, source).
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {Object} object The object to inspect.
+ * @param {Object} source The object of property values to match.
+ * @param {Function} [customizer] The function to customize comparisons.
+ * @returns {boolean} Returns `true` if `object` is a match, else `false`.
+ * @example
+ *
+ * function isGreeting(value) {
+ * return /^h(?:i|ello)$/.test(value);
+ * }
+ *
+ * function customizer(objValue, srcValue) {
+ * if (isGreeting(objValue) && isGreeting(srcValue)) {
+ * return true;
+ * }
+ * }
+ *
+ * var object = { 'greeting': 'hello' };
+ * var source = { 'greeting': 'hi' };
+ *
+ * _.isMatchWith(object, source, customizer);
+ * // => true
+ */
+ function isMatchWith(object, source, customizer) {
+ customizer = typeof customizer == 'function' ? customizer : undefined;
+ return baseIsMatch(object, source, getMatchData(source), customizer);
+ }
+
+ /**
+ * Checks if `value` is `NaN`.
+ *
+ * **Note:** This method is based on
+ * [`Number.isNaN`](https://mdn.io/Number/isNaN) and is not the same as
+ * global [`isNaN`](https://mdn.io/isNaN) which returns `true` for
+ * `undefined` and other non-number values.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`.
+ * @example
+ *
+ * _.isNaN(NaN);
+ * // => true
+ *
+ * _.isNaN(new Number(NaN));
+ * // => true
+ *
+ * isNaN(undefined);
+ * // => true
+ *
+ * _.isNaN(undefined);
+ * // => false
+ */
+ function isNaN(value) {
+ // A `NaN` primitive is the only value that is not equal to itself.
+ // Perform the `toStringTag` check first to avoid errors with some
+ // ActiveX objects in IE.
+ return isNumber(value) && value != +value;
+ }
+
+ /**
+ * Checks if `value` is a pristine native function.
+ *
+ * **Note:** This method can't reliably detect native functions in the presence
+ * of the core-js package because core-js circumvents this kind of detection.
+ * Despite multiple requests, the core-js maintainer has made it clear: any
+ * attempt to fix the detection will be obstructed. As a result, we're left
+ * with little choice but to throw an error. Unfortunately, this also affects
+ * packages, like [babel-polyfill](https://www.npmjs.com/package/babel-polyfill),
+ * which rely on core-js.
+ *
+ * @static
+ * @memberOf _
+ * @since 3.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a native function,
+ * else `false`.
+ * @example
+ *
+ * _.isNative(Array.prototype.push);
+ * // => true
+ *
+ * _.isNative(_);
+ * // => false
+ */
+ function isNative(value) {
+ if (isMaskable(value)) {
+ throw new Error(CORE_ERROR_TEXT);
+ }
+ return baseIsNative(value);
+ }
+
+ /**
+ * Checks if `value` is `null`.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is `null`, else `false`.
+ * @example
+ *
+ * _.isNull(null);
+ * // => true
+ *
+ * _.isNull(void 0);
+ * // => false
+ */
+ function isNull(value) {
+ return value === null;
+ }
+
+ /**
+ * Checks if `value` is `null` or `undefined`.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is nullish, else `false`.
+ * @example
+ *
+ * _.isNil(null);
+ * // => true
+ *
+ * _.isNil(void 0);
+ * // => true
+ *
+ * _.isNil(NaN);
+ * // => false
+ */
+ function isNil(value) {
+ return value == null;
+ }
+
+ /**
+ * Checks if `value` is classified as a `Number` primitive or object.
+ *
+ * **Note:** To exclude `Infinity`, `-Infinity`, and `NaN`, which are
+ * classified as numbers, use the `_.isFinite` method.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a number, else `false`.
+ * @example
+ *
+ * _.isNumber(3);
+ * // => true
+ *
+ * _.isNumber(Number.MIN_VALUE);
+ * // => true
+ *
+ * _.isNumber(Infinity);
+ * // => true
+ *
+ * _.isNumber('3');
+ * // => false
+ */
+ function isNumber(value) {
+ return typeof value == 'number' ||
+ (isObjectLike(value) && baseGetTag(value) == numberTag);
+ }
+
+ /**
+ * Checks if `value` is a plain object, that is, an object created by the
+ * `Object` constructor or one with a `[[Prototype]]` of `null`.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.8.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a plain object, else `false`.
+ * @example
+ *
+ * function Foo() {
+ * this.a = 1;
+ * }
+ *
+ * _.isPlainObject(new Foo);
+ * // => false
+ *
+ * _.isPlainObject([1, 2, 3]);
+ * // => false
+ *
+ * _.isPlainObject({ 'x': 0, 'y': 0 });
+ * // => true
+ *
+ * _.isPlainObject(Object.create(null));
+ * // => true
+ */
+ function isPlainObject(value) {
+ if (!isObjectLike(value) || baseGetTag(value) != objectTag) {
+ return false;
+ }
+ var proto = getPrototype(value);
+ if (proto === null) {
+ return true;
+ }
+ var Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor;
+ return typeof Ctor == 'function' && Ctor instanceof Ctor &&
+ funcToString.call(Ctor) == objectCtorString;
+ }
+
+ /**
+ * Checks if `value` is classified as a `RegExp` object.
+ *
+ * @static
+ * @memberOf _
+ * @since 0.1.0
+ * @category Lang
+ * @param {*} value The value to check.
+ * @returns {boolean} Returns `true` if `value` is a regexp, else `false`.
+ * @example + * + * _.isRegExp(/abc/); + * // => true + * + * _.isRegExp('/abc/'); + * // => false + */ + var isRegExp = nodeIsRegExp ? baseUnary(nodeIsRegExp) : baseIsRegExp; + + /** + * Checks if `value` is a safe integer. An integer is safe if it's an IEEE-754 + * double precision number which isn't the result of a rounded unsafe integer. + * + * **Note:** This method is based on + * [`Number.isSafeInteger`](https://mdn.io/Number/isSafeInteger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a safe integer, else `false`. + * @example + * + * _.isSafeInteger(3); + * // => true + * + * _.isSafeInteger(Number.MIN_VALUE); + * // => false + * + * _.isSafeInteger(Infinity); + * // => false + * + * _.isSafeInteger('3'); + * // => false + */ + function isSafeInteger(value) { + return isInteger(value) && value >= -MAX_SAFE_INTEGER && value <= MAX_SAFE_INTEGER; + } + + /** + * Checks if `value` is classified as a `Set` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a set, else `false`. + * @example + * + * _.isSet(new Set); + * // => true + * + * _.isSet(new WeakSet); + * // => false + */ + var isSet = nodeIsSet ? baseUnary(nodeIsSet) : baseIsSet; + + /** + * Checks if `value` is classified as a `String` primitive or object. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a string, else `false`. + * @example + * + * _.isString('abc'); + * // => true + * + * _.isString(1); + * // => false + */ + function isString(value) { + return typeof value == 'string' || + (!isArray(value) && isObjectLike(value) && baseGetTag(value) == stringTag); + } + + /** + * Checks if `value` is classified as a `Symbol` primitive or object. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a symbol, else `false`. + * @example + * + * _.isSymbol(Symbol.iterator); + * // => true + * + * _.isSymbol('abc'); + * // => false + */ + function isSymbol(value) { + return typeof value == 'symbol' || + (isObjectLike(value) && baseGetTag(value) == symbolTag); + } + + /** + * Checks if `value` is classified as a typed array. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a typed array, else `false`. + * @example + * + * _.isTypedArray(new Uint8Array); + * // => true + * + * _.isTypedArray([]); + * // => false + */ + var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray; + + /** + * Checks if `value` is `undefined`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `undefined`, else `false`. + * @example + * + * _.isUndefined(void 0); + * // => true + * + * _.isUndefined(null); + * // => false + */ + function isUndefined(value) { + return value === undefined; + } + + /** + * Checks if `value` is classified as a `WeakMap` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a weak map, else `false`. 
+ * @example + * + * _.isWeakMap(new WeakMap); + * // => true + * + * _.isWeakMap(new Map); + * // => false + */ + function isWeakMap(value) { + return isObjectLike(value) && getTag(value) == weakMapTag; + } + + /** + * Checks if `value` is classified as a `WeakSet` object. + * + * @static + * @memberOf _ + * @since 4.3.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a weak set, else `false`. + * @example + * + * _.isWeakSet(new WeakSet); + * // => true + * + * _.isWeakSet(new Set); + * // => false + */ + function isWeakSet(value) { + return isObjectLike(value) && baseGetTag(value) == weakSetTag; + } + + /** + * Checks if `value` is less than `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than `other`, + * else `false`. + * @see _.gt + * @example + * + * _.lt(1, 3); + * // => true + * + * _.lt(3, 3); + * // => false + * + * _.lt(3, 1); + * // => false + */ + var lt = createRelationalOperation(baseLt); + + /** + * Checks if `value` is less than or equal to `other`. + * + * @static + * @memberOf _ + * @since 3.9.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if `value` is less than or equal to + * `other`, else `false`. + * @see _.gte + * @example + * + * _.lte(1, 3); + * // => true + * + * _.lte(3, 3); + * // => true + * + * _.lte(3, 1); + * // => false + */ + var lte = createRelationalOperation(function(value, other) { + return value <= other; + }); + + /** + * Converts `value` to an array. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Lang + * @param {*} value The value to convert. + * @returns {Array} Returns the converted array. + * @example + * + * _.toArray({ 'a': 1, 'b': 2 }); + * // => [1, 2] + * + * _.toArray('abc'); + * // => ['a', 'b', 'c'] + * + * _.toArray(1); + * // => [] + * + * _.toArray(null); + * // => [] + */ + function toArray(value) { + if (!value) { + return []; + } + if (isArrayLike(value)) { + return isString(value) ? stringToArray(value) : copyArray(value); + } + if (symIterator && value[symIterator]) { + return iteratorToArray(value[symIterator]()); + } + var tag = getTag(value), + func = tag == mapTag ? mapToArray : (tag == setTag ? setToArray : values); + + return func(value); + } + + /** + * Converts `value` to a finite number. + * + * @static + * @memberOf _ + * @since 4.12.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted number. + * @example + * + * _.toFinite(3.2); + * // => 3.2 + * + * _.toFinite(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toFinite(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toFinite('3.2'); + * // => 3.2 + */ + function toFinite(value) { + if (!value) { + return value === 0 ? value : 0; + } + value = toNumber(value); + if (value === INFINITY || value === -INFINITY) { + var sign = (value < 0 ? -1 : 1); + return sign * MAX_INTEGER; + } + return value === value ? value : 0; + } + + /** + * Converts `value` to an integer. + * + * **Note:** This method is loosely based on + * [`ToInteger`](http://www.ecma-international.org/ecma-262/7.0/#sec-tointeger). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. 
+ * @example + * + * _.toInteger(3.2); + * // => 3 + * + * _.toInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toInteger(Infinity); + * // => 1.7976931348623157e+308 + * + * _.toInteger('3.2'); + * // => 3 + */ + function toInteger(value) { + var result = toFinite(value), + remainder = result % 1; + + return result === result ? (remainder ? result - remainder : result) : 0; + } + + /** + * Converts `value` to an integer suitable for use as the length of an + * array-like object. + * + * **Note:** This method is based on + * [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toLength(3.2); + * // => 3 + * + * _.toLength(Number.MIN_VALUE); + * // => 0 + * + * _.toLength(Infinity); + * // => 4294967295 + * + * _.toLength('3.2'); + * // => 3 + */ + function toLength(value) { + return value ? baseClamp(toInteger(value), 0, MAX_ARRAY_LENGTH) : 0; + } + + /** + * Converts `value` to a number. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to process. + * @returns {number} Returns the number. + * @example + * + * _.toNumber(3.2); + * // => 3.2 + * + * _.toNumber(Number.MIN_VALUE); + * // => 5e-324 + * + * _.toNumber(Infinity); + * // => Infinity + * + * _.toNumber('3.2'); + * // => 3.2 + */ + function toNumber(value) { + if (typeof value == 'number') { + return value; + } + if (isSymbol(value)) { + return NAN; + } + if (isObject(value)) { + var other = typeof value.valueOf == 'function' ? value.valueOf() : value; + value = isObject(other) ? (other + '') : other; + } + if (typeof value != 'string') { + return value === 0 ? value : +value; + } + value = baseTrim(value); + var isBinary = reIsBinary.test(value); + return (isBinary || reIsOctal.test(value)) + ? freeParseInt(value.slice(2), isBinary ? 2 : 8) + : (reIsBadHex.test(value) ? NAN : +value); + } + + /** + * Converts `value` to a plain object flattening inherited enumerable string + * keyed properties of `value` to own properties of the plain object. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {Object} Returns the converted plain object. + * @example + * + * function Foo() { + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.assign({ 'a': 1 }, new Foo); + * // => { 'a': 1, 'b': 2 } + * + * _.assign({ 'a': 1 }, _.toPlainObject(new Foo)); + * // => { 'a': 1, 'b': 2, 'c': 3 } + */ + function toPlainObject(value) { + return copyObject(value, keysIn(value)); + } + + /** + * Converts `value` to a safe integer. A safe integer can be compared and + * represented correctly. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {number} Returns the converted integer. + * @example + * + * _.toSafeInteger(3.2); + * // => 3 + * + * _.toSafeInteger(Number.MIN_VALUE); + * // => 0 + * + * _.toSafeInteger(Infinity); + * // => 9007199254740991 + * + * _.toSafeInteger('3.2'); + * // => 3 + */ + function toSafeInteger(value) { + return value + ? baseClamp(toInteger(value), -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER) + : (value === 0 ? value : 0); + } + + /** + * Converts `value` to a string. An empty string is returned for `null` + * and `undefined` values. The sign of `-0` is preserved. 
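+ * Arrays are converted by joining the string forms of their elements
+ * with commas, so the sign survives inside arrays too: `_.toString([1, -0])`
+ * gives `'1,-0'`.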
+ * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.toString(null); + * // => '' + * + * _.toString(-0); + * // => '-0' + * + * _.toString([1, 2, 3]); + * // => '1,2,3' + */ + function toString(value) { + return value == null ? '' : baseToString(value); + } + + /*------------------------------------------------------------------------*/ + + /** + * Assigns own enumerable string keyed properties of source objects to the + * destination object. Source objects are applied from left to right. + * Subsequent sources overwrite property assignments of previous sources. + * + * **Note:** This method mutates `object` and is loosely based on + * [`Object.assign`](https://mdn.io/Object/assign). + * + * @static + * @memberOf _ + * @since 0.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.assignIn + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assign({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'c': 3 } + */ + var assign = createAssigner(function(object, source) { + if (isPrototype(source) || isArrayLike(source)) { + copyObject(source, keys(source), object); + return; + } + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + assignValue(object, key, source[key]); + } + } + }); + + /** + * This method is like `_.assign` except that it iterates over own and + * inherited source properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extend + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.assign + * @example + * + * function Foo() { + * this.a = 1; + * } + * + * function Bar() { + * this.c = 3; + * } + * + * Foo.prototype.b = 2; + * Bar.prototype.d = 4; + * + * _.assignIn({ 'a': 0 }, new Foo, new Bar); + * // => { 'a': 1, 'b': 2, 'c': 3, 'd': 4 } + */ + var assignIn = createAssigner(function(object, source) { + copyObject(source, keysIn(source), object); + }); + + /** + * This method is like `_.assignIn` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias extendWith + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @see _.assignWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? 
srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignInWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var assignInWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keysIn(source), object, customizer); + }); + + /** + * This method is like `_.assign` except that it accepts `customizer` + * which is invoked to produce the assigned values. If `customizer` returns + * `undefined`, assignment is handled by the method instead. The `customizer` + * is invoked with five arguments: (objValue, srcValue, key, object, source). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @see _.assignInWith + * @example + * + * function customizer(objValue, srcValue) { + * return _.isUndefined(objValue) ? srcValue : objValue; + * } + * + * var defaults = _.partialRight(_.assignWith, customizer); + * + * defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var assignWith = createAssigner(function(object, source, srcIndex, customizer) { + copyObject(source, keys(source), object, customizer); + }); + + /** + * Creates an array of values corresponding to `paths` of `object`. + * + * @static + * @memberOf _ + * @since 1.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Array} Returns the picked values. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }, 4] }; + * + * _.at(object, ['a[0].b.c', 'a[1]']); + * // => [3, 4] + */ + var at = flatRest(baseAt); + + /** + * Creates an object that inherits from the `prototype` object. If a + * `properties` object is given, its own enumerable string keyed properties + * are assigned to the created object. + * + * @static + * @memberOf _ + * @since 2.3.0 + * @category Object + * @param {Object} prototype The object to inherit from. + * @param {Object} [properties] The properties to assign to the object. + * @returns {Object} Returns the new object. + * @example + * + * function Shape() { + * this.x = 0; + * this.y = 0; + * } + * + * function Circle() { + * Shape.call(this); + * } + * + * Circle.prototype = _.create(Shape.prototype, { + * 'constructor': Circle + * }); + * + * var circle = new Circle; + * circle instanceof Circle; + * // => true + * + * circle instanceof Shape; + * // => true + */ + function create(prototype, properties) { + var result = baseCreate(prototype); + return properties == null ? result : baseAssign(result, properties); + } + + /** + * Assigns own and inherited enumerable string keyed properties of source + * objects to the destination object for all destination properties that + * resolve to `undefined`. Source objects are applied from left to right. + * Once a property is set, additional values of the same property are ignored. + * + * **Note:** This method mutates `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. 
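+ *
+ * Unlike `_.assign`, a property that is already set is never overwritten,
+ * so `_.defaults({ 'a': 1 }, { 'a': 2, 'b': 2 }, { 'b': 3, 'c': 3 })`
+ * yields `{ 'a': 1, 'b': 2, 'c': 3 }`.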
+ * @see _.defaultsDeep + * @example + * + * _.defaults({ 'a': 1 }, { 'b': 2 }, { 'a': 3 }); + * // => { 'a': 1, 'b': 2 } + */ + var defaults = baseRest(function(object, sources) { + object = Object(object); + + var index = -1; + var length = sources.length; + var guard = length > 2 ? sources[2] : undefined; + + if (guard && isIterateeCall(sources[0], sources[1], guard)) { + length = 1; + } + + while (++index < length) { + var source = sources[index]; + var props = keysIn(source); + var propsIndex = -1; + var propsLength = props.length; + + while (++propsIndex < propsLength) { + var key = props[propsIndex]; + var value = object[key]; + + if (value === undefined || + (eq(value, objectProto[key]) && !hasOwnProperty.call(object, key))) { + object[key] = source[key]; + } + } + } + + return object; + }); + + /** + * This method is like `_.defaults` except that it recursively assigns + * default properties. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 3.10.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @see _.defaults + * @example + * + * _.defaultsDeep({ 'a': { 'b': 2 } }, { 'a': { 'b': 1, 'c': 3 } }); + * // => { 'a': { 'b': 2, 'c': 3 } } + */ + var defaultsDeep = baseRest(function(args) { + args.push(undefined, customDefaultsMerge); + return apply(mergeWith, undefined, args); + }); + + /** + * This method is like `_.find` except that it returns the key of the first + * element `predicate` returns truthy for instead of the element itself. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category Object + * @param {Object} object The object to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {string|undefined} Returns the key of the matched element, + * else `undefined`. + * @example + * + * var users = { + * 'barney': { 'age': 36, 'active': true }, + * 'fred': { 'age': 40, 'active': false }, + * 'pebbles': { 'age': 1, 'active': true } + * }; + * + * _.findKey(users, function(o) { return o.age < 40; }); + * // => 'barney' (iteration order is not guaranteed) + * + * // The `_.matches` iteratee shorthand. + * _.findKey(users, { 'age': 1, 'active': true }); + * // => 'pebbles' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findKey(users, ['active', false]); + * // => 'fred' + * + * // The `_.property` iteratee shorthand. + * _.findKey(users, 'active'); + * // => 'barney' + */ + function findKey(object, predicate) { + return baseFindKey(object, getIteratee(predicate, 3), baseForOwn); + } + + /** + * This method is like `_.findKey` except that it iterates over elements of + * a collection in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to inspect. + * @param {Function} [predicate=_.identity] The function invoked per iteration. + * @returns {string|undefined} Returns the key of the matched element, + * else `undefined`. + * @example + * + * var users = { + * 'barney': { 'age': 36, 'active': true }, + * 'fred': { 'age': 40, 'active': false }, + * 'pebbles': { 'age': 1, 'active': true } + * }; + * + * _.findLastKey(users, function(o) { return o.age < 40; }); + * // => returns 'pebbles' assuming `_.findKey` returns 'barney' + * + * // The `_.matches` iteratee shorthand. 
+ * _.findLastKey(users, { 'age': 36, 'active': true }); + * // => 'barney' + * + * // The `_.matchesProperty` iteratee shorthand. + * _.findLastKey(users, ['active', false]); + * // => 'fred' + * + * // The `_.property` iteratee shorthand. + * _.findLastKey(users, 'active'); + * // => 'pebbles' + */ + function findLastKey(object, predicate) { + return baseFindKey(object, getIteratee(predicate, 3), baseForOwnRight); + } + + /** + * Iterates over own and inherited enumerable string keyed properties of an + * object and invokes `iteratee` for each property. The iteratee is invoked + * with three arguments: (value, key, object). Iteratee functions may exit + * iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 0.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forInRight + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forIn(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a', 'b', then 'c' (iteration order is not guaranteed). + */ + function forIn(object, iteratee) { + return object == null + ? object + : baseFor(object, getIteratee(iteratee, 3), keysIn); + } + + /** + * This method is like `_.forIn` except that it iterates over properties of + * `object` in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forIn + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forInRight(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'c', 'b', then 'a' assuming `_.forIn` logs 'a', 'b', then 'c'. + */ + function forInRight(object, iteratee) { + return object == null + ? object + : baseForRight(object, getIteratee(iteratee, 3), keysIn); + } + + /** + * Iterates over own enumerable string keyed properties of an object and + * invokes `iteratee` for each property. The iteratee is invoked with three + * arguments: (value, key, object). Iteratee functions may exit iteration + * early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 0.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. + * @see _.forOwnRight + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forOwn(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'a' then 'b' (iteration order is not guaranteed). + */ + function forOwn(object, iteratee) { + return object && baseForOwn(object, getIteratee(iteratee, 3)); + } + + /** + * This method is like `_.forOwn` except that it iterates over properties of + * `object` in the opposite order. + * + * @static + * @memberOf _ + * @since 2.0.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns `object`. 
+ * @see _.forOwn + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.forOwnRight(new Foo, function(value, key) { + * console.log(key); + * }); + * // => Logs 'b' then 'a' assuming `_.forOwn` logs 'a' then 'b'. + */ + function forOwnRight(object, iteratee) { + return object && baseForOwnRight(object, getIteratee(iteratee, 3)); + } + + /** + * Creates an array of function property names from own enumerable properties + * of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to inspect. + * @returns {Array} Returns the function names. + * @see _.functionsIn + * @example + * + * function Foo() { + * this.a = _.constant('a'); + * this.b = _.constant('b'); + * } + * + * Foo.prototype.c = _.constant('c'); + * + * _.functions(new Foo); + * // => ['a', 'b'] + */ + function functions(object) { + return object == null ? [] : baseFunctions(object, keys(object)); + } + + /** + * Creates an array of function property names from own and inherited + * enumerable properties of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to inspect. + * @returns {Array} Returns the function names. + * @see _.functions + * @example + * + * function Foo() { + * this.a = _.constant('a'); + * this.b = _.constant('b'); + * } + * + * Foo.prototype.c = _.constant('c'); + * + * _.functionsIn(new Foo); + * // => ['a', 'b', 'c'] + */ + function functionsIn(object) { + return object == null ? [] : baseFunctions(object, keysIn(object)); + } + + /** + * Gets the value at `path` of `object`. If the resolved value is + * `undefined`, the `defaultValue` is returned in its place. + * + * @static + * @memberOf _ + * @since 3.7.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to get. + * @param {*} [defaultValue] The value returned for `undefined` resolved values. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.get(object, 'a[0].b.c'); + * // => 3 + * + * _.get(object, ['a', '0', 'b', 'c']); + * // => 3 + * + * _.get(object, 'a.b.c', 'default'); + * // => 'default' + */ + function get(object, path, defaultValue) { + var result = object == null ? undefined : baseGet(object, path); + return result === undefined ? defaultValue : result; + } + + /** + * Checks if `path` is a direct property of `object`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. + * @example + * + * var object = { 'a': { 'b': 2 } }; + * var other = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.has(object, 'a'); + * // => true + * + * _.has(object, 'a.b'); + * // => true + * + * _.has(object, ['a', 'b']); + * // => true + * + * _.has(other, 'a'); + * // => false + */ + function has(object, path) { + return object != null && hasPath(object, path, baseHas); + } + + /** + * Checks if `path` is a direct or inherited property of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path to check. + * @returns {boolean} Returns `true` if `path` exists, else `false`. 
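+ *
+ * The only difference from `_.has` is that inherited properties count,
+ * which is why `_.hasIn(object, 'a')` succeeds below on an object built
+ * with `_.create` while `_.has` would return `false`.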
+ * @example + * + * var object = _.create({ 'a': _.create({ 'b': 2 }) }); + * + * _.hasIn(object, 'a'); + * // => true + * + * _.hasIn(object, 'a.b'); + * // => true + * + * _.hasIn(object, ['a', 'b']); + * // => true + * + * _.hasIn(object, 'b'); + * // => false + */ + function hasIn(object, path) { + return object != null && hasPath(object, path, baseHasIn); + } + + /** + * Creates an object composed of the inverted keys and values of `object`. + * If `object` contains duplicate values, subsequent values overwrite + * property assignments of previous values. + * + * @static + * @memberOf _ + * @since 0.7.0 + * @category Object + * @param {Object} object The object to invert. + * @returns {Object} Returns the new inverted object. + * @example + * + * var object = { 'a': 1, 'b': 2, 'c': 1 }; + * + * _.invert(object); + * // => { '1': 'c', '2': 'b' } + */ + var invert = createInverter(function(result, value, key) { + if (value != null && + typeof value.toString != 'function') { + value = nativeObjectToString.call(value); + } + + result[value] = key; + }, constant(identity)); + + /** + * This method is like `_.invert` except that the inverted object is generated + * from the results of running each element of `object` thru `iteratee`. The + * corresponding inverted value of each inverted key is an array of keys + * responsible for generating the inverted value. The iteratee is invoked + * with one argument: (value). + * + * @static + * @memberOf _ + * @since 4.1.0 + * @category Object + * @param {Object} object The object to invert. + * @param {Function} [iteratee=_.identity] The iteratee invoked per element. + * @returns {Object} Returns the new inverted object. + * @example + * + * var object = { 'a': 1, 'b': 2, 'c': 1 }; + * + * _.invertBy(object); + * // => { '1': ['a', 'c'], '2': ['b'] } + * + * _.invertBy(object, function(value) { + * return 'group' + value; + * }); + * // => { 'group1': ['a', 'c'], 'group2': ['b'] } + */ + var invertBy = createInverter(function(result, value, key) { + if (value != null && + typeof value.toString != 'function') { + value = nativeObjectToString.call(value); + } + + if (hasOwnProperty.call(result, value)) { + result[value].push(key); + } else { + result[value] = [key]; + } + }, getIteratee); + + /** + * Invokes the method at `path` of `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the method to invoke. + * @param {...*} [args] The arguments to invoke the method with. + * @returns {*} Returns the result of the invoked method. + * @example + * + * var object = { 'a': [{ 'b': { 'c': [1, 2, 3, 4] } }] }; + * + * _.invoke(object, 'a[0].b.c.slice', 1, 3); + * // => [2, 3] + */ + var invoke = baseRest(baseInvoke); + + /** + * Creates an array of the own enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. See the + * [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys) + * for more details. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keys(new Foo); + * // => ['a', 'b'] (iteration order is not guaranteed) + * + * _.keys('hi'); + * // => ['0', '1'] + */ + function keys(object) { + return isArrayLike(object) ? 
arrayLikeKeys(object) : baseKeys(object); + } + + /** + * Creates an array of the own and inherited enumerable property names of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property names. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.keysIn(new Foo); + * // => ['a', 'b', 'c'] (iteration order is not guaranteed) + */ + function keysIn(object) { + return isArrayLike(object) ? arrayLikeKeys(object, true) : baseKeysIn(object); + } + + /** + * The opposite of `_.mapValues`; this method creates an object with the + * same values as `object` and keys generated by running each own enumerable + * string keyed property of `object` thru `iteratee`. The iteratee is invoked + * with three arguments: (value, key, object). + * + * @static + * @memberOf _ + * @since 3.8.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns the new mapped object. + * @see _.mapValues + * @example + * + * _.mapKeys({ 'a': 1, 'b': 2 }, function(value, key) { + * return key + value; + * }); + * // => { 'a1': 1, 'b2': 2 } + */ + function mapKeys(object, iteratee) { + var result = {}; + iteratee = getIteratee(iteratee, 3); + + baseForOwn(object, function(value, key, object) { + baseAssignValue(result, iteratee(value, key, object), value); + }); + return result; + } + + /** + * Creates an object with the same keys as `object` and values generated + * by running each own enumerable string keyed property of `object` thru + * `iteratee`. The iteratee is invoked with three arguments: + * (value, key, object). + * + * @static + * @memberOf _ + * @since 2.4.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @returns {Object} Returns the new mapped object. + * @see _.mapKeys + * @example + * + * var users = { + * 'fred': { 'user': 'fred', 'age': 40 }, + * 'pebbles': { 'user': 'pebbles', 'age': 1 } + * }; + * + * _.mapValues(users, function(o) { return o.age; }); + * // => { 'fred': 40, 'pebbles': 1 } (iteration order is not guaranteed) + * + * // The `_.property` iteratee shorthand. + * _.mapValues(users, 'age'); + * // => { 'fred': 40, 'pebbles': 1 } (iteration order is not guaranteed) + */ + function mapValues(object, iteratee) { + var result = {}; + iteratee = getIteratee(iteratee, 3); + + baseForOwn(object, function(value, key, object) { + baseAssignValue(result, key, iteratee(value, key, object)); + }); + return result; + } + + /** + * This method is like `_.assign` except that it recursively merges own and + * inherited enumerable string keyed properties of source objects into the + * destination object. Source properties that resolve to `undefined` are + * skipped if a destination value exists. Array and plain object properties + * are merged recursively. Other objects and value types are overridden by + * assignment. Source objects are applied from left to right. Subsequent + * sources overwrite property assignments of previous sources. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 0.5.0 + * @category Object + * @param {Object} object The destination object. 
+ * @param {...Object} [sources] The source objects. + * @returns {Object} Returns `object`. + * @example + * + * var object = { + * 'a': [{ 'b': 2 }, { 'd': 4 }] + * }; + * + * var other = { + * 'a': [{ 'c': 3 }, { 'e': 5 }] + * }; + * + * _.merge(object, other); + * // => { 'a': [{ 'b': 2, 'c': 3 }, { 'd': 4, 'e': 5 }] } + */ + var merge = createAssigner(function(object, source, srcIndex) { + baseMerge(object, source, srcIndex); + }); + + /** + * This method is like `_.merge` except that it accepts `customizer` which + * is invoked to produce the merged values of the destination and source + * properties. If `customizer` returns `undefined`, merging is handled by the + * method instead. The `customizer` is invoked with six arguments: + * (objValue, srcValue, key, object, source, stack). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The destination object. + * @param {...Object} sources The source objects. + * @param {Function} customizer The function to customize assigned values. + * @returns {Object} Returns `object`. + * @example + * + * function customizer(objValue, srcValue) { + * if (_.isArray(objValue)) { + * return objValue.concat(srcValue); + * } + * } + * + * var object = { 'a': [1], 'b': [2] }; + * var other = { 'a': [3], 'b': [4] }; + * + * _.mergeWith(object, other, customizer); + * // => { 'a': [1, 3], 'b': [2, 4] } + */ + var mergeWith = createAssigner(function(object, source, srcIndex, customizer) { + baseMerge(object, source, srcIndex, customizer); + }); + + /** + * The opposite of `_.pick`; this method creates an object composed of the + * own and inherited enumerable property paths of `object` that are not omitted. + * + * **Note:** This method is considerably slower than `_.pick`. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The source object. + * @param {...(string|string[])} [paths] The property paths to omit. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.omit(object, ['a', 'c']); + * // => { 'b': '2' } + */ + var omit = flatRest(function(object, paths) { + var result = {}; + if (object == null) { + return result; + } + var isDeep = false; + paths = arrayMap(paths, function(path) { + path = castPath(path, object); + isDeep || (isDeep = path.length > 1); + return path; + }); + copyObject(object, getAllKeysIn(object), result); + if (isDeep) { + result = baseClone(result, CLONE_DEEP_FLAG | CLONE_FLAT_FLAG | CLONE_SYMBOLS_FLAG, customOmitClone); + } + var length = paths.length; + while (length--) { + baseUnset(result, paths[length]); + } + return result; + }); + + /** + * The opposite of `_.pickBy`; this method creates an object composed of + * the own and inherited enumerable string keyed properties of `object` that + * `predicate` doesn't return truthy for. The predicate is invoked with two + * arguments: (value, key). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The source object. + * @param {Function} [predicate=_.identity] The function invoked per property. + * @returns {Object} Returns the new object. 
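+ *
+ * This is equivalent to `_.pickBy(object, _.negate(predicate))`, which is
+ * exactly how the function below is implemented.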
+ * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.omitBy(object, _.isNumber); + * // => { 'b': '2' } + */ + function omitBy(object, predicate) { + return pickBy(object, negate(getIteratee(predicate))); + } + + /** + * Creates an object composed of the picked `object` properties. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The source object. + * @param {...(string|string[])} [paths] The property paths to pick. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.pick(object, ['a', 'c']); + * // => { 'a': 1, 'c': 3 } + */ + var pick = flatRest(function(object, paths) { + return object == null ? {} : basePick(object, paths); + }); + + /** + * Creates an object composed of the `object` properties `predicate` returns + * truthy for. The predicate is invoked with two arguments: (value, key). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The source object. + * @param {Function} [predicate=_.identity] The function invoked per property. + * @returns {Object} Returns the new object. + * @example + * + * var object = { 'a': 1, 'b': '2', 'c': 3 }; + * + * _.pickBy(object, _.isNumber); + * // => { 'a': 1, 'c': 3 } + */ + function pickBy(object, predicate) { + if (object == null) { + return {}; + } + var props = arrayMap(getAllKeysIn(object), function(prop) { + return [prop]; + }); + predicate = getIteratee(predicate); + return basePickBy(object, props, function(value, path) { + return predicate(value, path[0]); + }); + } + + /** + * This method is like `_.get` except that if the resolved value is a + * function it's invoked with the `this` binding of its parent object and + * its result is returned. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @param {Array|string} path The path of the property to resolve. + * @param {*} [defaultValue] The value returned for `undefined` resolved values. + * @returns {*} Returns the resolved value. + * @example + * + * var object = { 'a': [{ 'b': { 'c1': 3, 'c2': _.constant(4) } }] }; + * + * _.result(object, 'a[0].b.c1'); + * // => 3 + * + * _.result(object, 'a[0].b.c2'); + * // => 4 + * + * _.result(object, 'a[0].b.c3', 'default'); + * // => 'default' + * + * _.result(object, 'a[0].b.c3', _.constant('default')); + * // => 'default' + */ + function result(object, path, defaultValue) { + path = castPath(path, object); + + var index = -1, + length = path.length; + + // Ensure the loop is entered when path is empty. + if (!length) { + length = 1; + object = undefined; + } + while (++index < length) { + var value = object == null ? undefined : object[toKey(path[index])]; + if (value === undefined) { + index = length; + value = defaultValue; + } + object = isFunction(value) ? value.call(object) : value; + } + return object; + } + + /** + * Sets the value at `path` of `object`. If a portion of `path` doesn't exist, + * it's created. Arrays are created for missing index properties while objects + * are created for all other missing properties. Use `_.setWith` to customize + * `path` creation. + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 3.7.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @returns {Object} Returns `object`. 
+ * @example + * + * var object = { 'a': [{ 'b': { 'c': 3 } }] }; + * + * _.set(object, 'a[0].b.c', 4); + * console.log(object.a[0].b.c); + * // => 4 + * + * _.set(object, ['x', '0', 'y', 'z'], 5); + * console.log(object.x[0].y.z); + * // => 5 + */ + function set(object, path, value) { + return object == null ? object : baseSet(object, path, value); + } + + /** + * This method is like `_.set` except that it accepts `customizer` which is + * invoked to produce the objects of `path`. If `customizer` returns `undefined` + * path creation is handled by the method instead. The `customizer` is invoked + * with three arguments: (nsValue, key, nsObject). + * + * **Note:** This method mutates `object`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Object + * @param {Object} object The object to modify. + * @param {Array|string} path The path of the property to set. + * @param {*} value The value to set. + * @param {Function} [customizer] The function to customize assigned values. + * @returns {Object} Returns `object`. + * @example + * + * var object = {}; + * + * _.setWith(object, '[0][1]', 'a', Object); + * // => { '0': { '1': 'a' } } + */ + function setWith(object, path, value, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return object == null ? object : baseSet(object, path, value, customizer); + } + + /** + * Creates an array of own enumerable string keyed-value pairs for `object` + * which can be consumed by `_.fromPairs`. If `object` is a map or set, its + * entries are returned. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias entries + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the key-value pairs. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.toPairs(new Foo); + * // => [['a', 1], ['b', 2]] (iteration order is not guaranteed) + */ + var toPairs = createToPairs(keys); + + /** + * Creates an array of own and inherited enumerable string keyed-value pairs + * for `object` which can be consumed by `_.fromPairs`. If `object` is a map + * or set, its entries are returned. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @alias entriesIn + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the key-value pairs. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.toPairsIn(new Foo); + * // => [['a', 1], ['b', 2], ['c', 3]] (iteration order is not guaranteed) + */ + var toPairsIn = createToPairs(keysIn); + + /** + * An alternative to `_.reduce`; this method transforms `object` to a new + * `accumulator` object which is the result of running each of its own + * enumerable string keyed properties thru `iteratee`, with each invocation + * potentially mutating the `accumulator` object. If `accumulator` is not + * provided, a new object with the same `[[Prototype]]` will be used. The + * iteratee is invoked with four arguments: (accumulator, value, key, object). + * Iteratee functions may exit iteration early by explicitly returning `false`. + * + * @static + * @memberOf _ + * @since 1.3.0 + * @category Object + * @param {Object} object The object to iterate over. + * @param {Function} [iteratee=_.identity] The function invoked per iteration. + * @param {*} [accumulator] The custom accumulator value. + * @returns {*} Returns the accumulated value. 
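+ *
+ * In the first example below, iteration stops after `3` is squared: the
+ * iteratee returns `9 % 2 == 0`, which is `false`, so `16` is never pushed
+ * and the result is `[4, 9]` rather than `[4, 9, 16]`.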
+ * @example
+ *
+ * _.transform([2, 3, 4], function(result, n) {
+ * result.push(n *= n);
+ * return n % 2 == 0;
+ * }, []);
+ * // => [4, 9]
+ *
+ * _.transform({ 'a': 1, 'b': 2, 'c': 1 }, function(result, value, key) {
+ * (result[value] || (result[value] = [])).push(key);
+ * }, {});
+ * // => { '1': ['a', 'c'], '2': ['b'] }
+ */
+ function transform(object, iteratee, accumulator) {
+ var isArr = isArray(object),
+ isArrLike = isArr || isBuffer(object) || isTypedArray(object);
+
+ iteratee = getIteratee(iteratee, 4);
+ if (accumulator == null) {
+ var Ctor = object && object.constructor;
+ if (isArrLike) {
+ accumulator = isArr ? new Ctor : [];
+ }
+ else if (isObject(object)) {
+ accumulator = isFunction(Ctor) ? baseCreate(getPrototype(object)) : {};
+ }
+ else {
+ accumulator = {};
+ }
+ }
+ (isArrLike ? arrayEach : baseForOwn)(object, function(value, index, object) {
+ return iteratee(accumulator, value, index, object);
+ });
+ return accumulator;
+ }
+
+ /**
+ * Removes the property at `path` of `object`.
+ *
+ * **Note:** This method mutates `object`.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.0.0
+ * @category Object
+ * @param {Object} object The object to modify.
+ * @param {Array|string} path The path of the property to unset.
+ * @returns {boolean} Returns `true` if the property is deleted, else `false`.
+ * @example
+ *
+ * var object = { 'a': [{ 'b': { 'c': 7 } }] };
+ * _.unset(object, 'a[0].b.c');
+ * // => true
+ *
+ * console.log(object);
+ * // => { 'a': [{ 'b': {} }] };
+ *
+ * _.unset(object, ['a', '0', 'b', 'c']);
+ * // => true
+ *
+ * console.log(object);
+ * // => { 'a': [{ 'b': {} }] };
+ */
+ function unset(object, path) {
+ return object == null ? true : baseUnset(object, path);
+ }
+
+ /**
+ * This method is like `_.set` except that it accepts `updater` to produce
+ * the value to set. Use `_.updateWith` to customize `path` creation. The
+ * `updater` is invoked with one argument: (value).
+ *
+ * **Note:** This method mutates `object`.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.6.0
+ * @category Object
+ * @param {Object} object The object to modify.
+ * @param {Array|string} path The path of the property to set.
+ * @param {Function} updater The function to produce the updated value.
+ * @returns {Object} Returns `object`.
+ * @example
+ *
+ * var object = { 'a': [{ 'b': { 'c': 3 } }] };
+ *
+ * _.update(object, 'a[0].b.c', function(n) { return n * n; });
+ * console.log(object.a[0].b.c);
+ * // => 9
+ *
+ * _.update(object, 'x[0].y.z', function(n) { return n ? n + 1 : 0; });
+ * console.log(object.x[0].y.z);
+ * // => 0
+ */
+ function update(object, path, updater) {
+ return object == null ? object : baseUpdate(object, path, castFunction(updater));
+ }
+
+ /**
+ * This method is like `_.update` except that it accepts `customizer` which is
+ * invoked to produce the objects of `path`. If `customizer` returns `undefined`
+ * path creation is handled by the method instead. The `customizer` is invoked
+ * with three arguments: (nsValue, key, nsObject).
+ *
+ * **Note:** This method mutates `object`.
+ *
+ * @static
+ * @memberOf _
+ * @since 4.6.0
+ * @category Object
+ * @param {Object} object The object to modify.
+ * @param {Array|string} path The path of the property to set.
+ * @param {Function} updater The function to produce the updated value.
+ * @param {Function} [customizer] The function to customize assigned values.
+ * @returns {Object} Returns `object`.
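+ *
+ * As with `_.setWith`, passing the `Object` constructor as `customizer`
+ * forces plain objects to be created for index-like path segments, as the
+ * example below shows.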
+ * @example + * + * var object = {}; + * + * _.updateWith(object, '[0][1]', _.constant('a'), Object); + * // => { '0': { '1': 'a' } } + */ + function updateWith(object, path, updater, customizer) { + customizer = typeof customizer == 'function' ? customizer : undefined; + return object == null ? object : baseUpdate(object, path, castFunction(updater), customizer); + } + + /** + * Creates an array of the own enumerable string keyed property values of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property values. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.values(new Foo); + * // => [1, 2] (iteration order is not guaranteed) + * + * _.values('hi'); + * // => ['h', 'i'] + */ + function values(object) { + return object == null ? [] : baseValues(object, keys(object)); + } + + /** + * Creates an array of the own and inherited enumerable string keyed property + * values of `object`. + * + * **Note:** Non-object values are coerced to objects. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category Object + * @param {Object} object The object to query. + * @returns {Array} Returns the array of property values. + * @example + * + * function Foo() { + * this.a = 1; + * this.b = 2; + * } + * + * Foo.prototype.c = 3; + * + * _.valuesIn(new Foo); + * // => [1, 2, 3] (iteration order is not guaranteed) + */ + function valuesIn(object) { + return object == null ? [] : baseValues(object, keysIn(object)); + } + + /*------------------------------------------------------------------------*/ + + /** + * Clamps `number` within the inclusive `lower` and `upper` bounds. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category Number + * @param {number} number The number to clamp. + * @param {number} [lower] The lower bound. + * @param {number} upper The upper bound. + * @returns {number} Returns the clamped number. + * @example + * + * _.clamp(-10, -5, 5); + * // => -5 + * + * _.clamp(10, -5, 5); + * // => 5 + */ + function clamp(number, lower, upper) { + if (upper === undefined) { + upper = lower; + lower = undefined; + } + if (upper !== undefined) { + upper = toNumber(upper); + upper = upper === upper ? upper : 0; + } + if (lower !== undefined) { + lower = toNumber(lower); + lower = lower === lower ? lower : 0; + } + return baseClamp(toNumber(number), lower, upper); + } + + /** + * Checks if `n` is between `start` and up to, but not including, `end`. If + * `end` is not specified, it's set to `start` with `start` then set to `0`. + * If `start` is greater than `end` the params are swapped to support + * negative ranges. + * + * @static + * @memberOf _ + * @since 3.3.0 + * @category Number + * @param {number} number The number to check. + * @param {number} [start=0] The start of the range. + * @param {number} end The end of the range. + * @returns {boolean} Returns `true` if `number` is in the range, else `false`. 
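+ *
+ * In the last example below, `start` exceeds `end`, so the bounds are
+ * swapped and the check performed is `-6 <= -3 < -2`, which holds.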
+ * @see _.range, _.rangeRight + * @example + * + * _.inRange(3, 2, 4); + * // => true + * + * _.inRange(4, 8); + * // => true + * + * _.inRange(4, 2); + * // => false + * + * _.inRange(2, 2); + * // => false + * + * _.inRange(1.2, 2); + * // => true + * + * _.inRange(5.2, 4); + * // => false + * + * _.inRange(-3, -2, -6); + * // => true + */ + function inRange(number, start, end) { + start = toFinite(start); + if (end === undefined) { + end = start; + start = 0; + } else { + end = toFinite(end); + } + number = toNumber(number); + return baseInRange(number, start, end); + } + + /** + * Produces a random number between the inclusive `lower` and `upper` bounds. + * If only one argument is provided a number between `0` and the given number + * is returned. If `floating` is `true`, or either `lower` or `upper` are + * floats, a floating-point number is returned instead of an integer. + * + * **Note:** JavaScript follows the IEEE-754 standard for resolving + * floating-point values which can produce unexpected results. + * + * @static + * @memberOf _ + * @since 0.7.0 + * @category Number + * @param {number} [lower=0] The lower bound. + * @param {number} [upper=1] The upper bound. + * @param {boolean} [floating] Specify returning a floating-point number. + * @returns {number} Returns the random number. + * @example + * + * _.random(0, 5); + * // => an integer between 0 and 5 + * + * _.random(5); + * // => also an integer between 0 and 5 + * + * _.random(5, true); + * // => a floating-point number between 0 and 5 + * + * _.random(1.2, 5.2); + * // => a floating-point number between 1.2 and 5.2 + */ + function random(lower, upper, floating) { + if (floating && typeof floating != 'boolean' && isIterateeCall(lower, upper, floating)) { + upper = floating = undefined; + } + if (floating === undefined) { + if (typeof upper == 'boolean') { + floating = upper; + upper = undefined; + } + else if (typeof lower == 'boolean') { + floating = lower; + lower = undefined; + } + } + if (lower === undefined && upper === undefined) { + lower = 0; + upper = 1; + } + else { + lower = toFinite(lower); + if (upper === undefined) { + upper = lower; + lower = 0; + } else { + upper = toFinite(upper); + } + } + if (lower > upper) { + var temp = lower; + lower = upper; + upper = temp; + } + if (floating || lower % 1 || upper % 1) { + var rand = nativeRandom(); + return nativeMin(lower + (rand * (upper - lower + freeParseFloat('1e-' + ((rand + '').length - 1)))), upper); + } + return baseRandom(lower, upper); + } + + /*------------------------------------------------------------------------*/ + + /** + * Converts `string` to [camel case](https://en.wikipedia.org/wiki/CamelCase). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the camel cased string. + * @example + * + * _.camelCase('Foo Bar'); + * // => 'fooBar' + * + * _.camelCase('--foo-bar--'); + * // => 'fooBar' + * + * _.camelCase('__FOO_BAR__'); + * // => 'fooBar' + */ + var camelCase = createCompounder(function(result, word, index) { + word = word.toLowerCase(); + return result + (index ? capitalize(word) : word); + }); + + /** + * Converts the first character of `string` to upper case and the remaining + * to lower case. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to capitalize. + * @returns {string} Returns the capitalized string. 
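+ *
+ * The remainder of the string is lower cased first, so mixed-case input
+ * such as `'fRED'` also becomes `'Fred'`.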
+ * @example + * + * _.capitalize('FRED'); + * // => 'Fred' + */ + function capitalize(string) { + return upperFirst(toString(string).toLowerCase()); + } + + /** + * Deburrs `string` by converting + * [Latin-1 Supplement](https://en.wikipedia.org/wiki/Latin-1_Supplement_(Unicode_block)#Character_table) + * and [Latin Extended-A](https://en.wikipedia.org/wiki/Latin_Extended-A) + * letters to basic Latin letters and removing + * [combining diacritical marks](https://en.wikipedia.org/wiki/Combining_Diacritical_Marks). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to deburr. + * @returns {string} Returns the deburred string. + * @example + * + * _.deburr('déjà vu'); + * // => 'deja vu' + */ + function deburr(string) { + string = toString(string); + return string && string.replace(reLatin, deburrLetter).replace(reComboMark, ''); + } + + /** + * Checks if `string` ends with the given target string. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to inspect. + * @param {string} [target] The string to search for. + * @param {number} [position=string.length] The position to search up to. + * @returns {boolean} Returns `true` if `string` ends with `target`, + * else `false`. + * @example + * + * _.endsWith('abc', 'c'); + * // => true + * + * _.endsWith('abc', 'b'); + * // => false + * + * _.endsWith('abc', 'b', 2); + * // => true + */ + function endsWith(string, target, position) { + string = toString(string); + target = baseToString(target); + + var length = string.length; + position = position === undefined + ? length + : baseClamp(toInteger(position), 0, length); + + var end = position; + position -= target.length; + return position >= 0 && string.slice(position, end) == target; + } + + /** + * Converts the characters "&", "<", ">", '"', and "'" in `string` to their + * corresponding HTML entities. + * + * **Note:** No other characters are escaped. To escape additional + * characters use a third-party library like [_he_](https://mths.be/he). + * + * Though the ">" character is escaped for symmetry, characters like + * ">" and "/" don't need escaping in HTML and have no special meaning + * unless they're part of a tag or unquoted attribute value. See + * [Mathias Bynens's article](https://mathiasbynens.be/notes/ambiguous-ampersands) + * (under "semi-related fun fact") for more details. + * + * When working with HTML you should always + * [quote attribute values](http://wonko.com/post/html-escaping) to reduce + * XSS vectors. + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category String + * @param {string} [string=''] The string to escape. + * @returns {string} Returns the escaped string. + * @example + * + * _.escape('fred, barney, & pebbles'); + * // => 'fred, barney, & pebbles' + */ + function escape(string) { + string = toString(string); + return (string && reHasUnescapedHtml.test(string)) + ? string.replace(reUnescapedHtml, escapeHtmlChar) + : string; + } + + /** + * Escapes the `RegExp` special characters "^", "$", "\", ".", "*", "+", + * "?", "(", ")", "[", "]", "{", "}", and "|" in `string`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to escape. + * @returns {string} Returns the escaped string. 
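+ *
+ * A common use is matching a literal, possibly user-supplied, substring,
+ * e.g. `new RegExp(_.escapeRegExp(userInput))`, which matches `userInput`
+ * verbatim.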
+ * @example + * + * _.escapeRegExp('[lodash](https://lodash.com/)'); + * // => '\[lodash\]\(https://lodash\.com/\)' + */ + function escapeRegExp(string) { + string = toString(string); + return (string && reHasRegExpChar.test(string)) + ? string.replace(reRegExpChar, '\\$&') + : string; + } + + /** + * Converts `string` to + * [kebab case](https://en.wikipedia.org/wiki/Letter_case#Special_case_styles). + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the kebab cased string. + * @example + * + * _.kebabCase('Foo Bar'); + * // => 'foo-bar' + * + * _.kebabCase('fooBar'); + * // => 'foo-bar' + * + * _.kebabCase('__FOO_BAR__'); + * // => 'foo-bar' + */ + var kebabCase = createCompounder(function(result, word, index) { + return result + (index ? '-' : '') + word.toLowerCase(); + }); + + /** + * Converts `string`, as space separated words, to lower case. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the lower cased string. + * @example + * + * _.lowerCase('--Foo-Bar--'); + * // => 'foo bar' + * + * _.lowerCase('fooBar'); + * // => 'foo bar' + * + * _.lowerCase('__FOO_BAR__'); + * // => 'foo bar' + */ + var lowerCase = createCompounder(function(result, word, index) { + return result + (index ? ' ' : '') + word.toLowerCase(); + }); + + /** + * Converts the first character of `string` to lower case. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the converted string. + * @example + * + * _.lowerFirst('Fred'); + * // => 'fred' + * + * _.lowerFirst('FRED'); + * // => 'fRED' + */ + var lowerFirst = createCaseFirst('toLowerCase'); + + /** + * Pads `string` on the left and right sides if it's shorter than `length`. + * Padding characters are truncated if they can't be evenly divided by `length`. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.pad('abc', 8); + * // => ' abc ' + * + * _.pad('abc', 8, '_-'); + * // => '_-abc_-_' + * + * _.pad('abc', 3); + * // => 'abc' + */ + function pad(string, length, chars) { + string = toString(string); + length = toInteger(length); + + var strLength = length ? stringSize(string) : 0; + if (!length || strLength >= length) { + return string; + } + var mid = (length - strLength) / 2; + return ( + createPadding(nativeFloor(mid), chars) + + string + + createPadding(nativeCeil(mid), chars) + ); + } + + /** + * Pads `string` on the right side if it's shorter than `length`. Padding + * characters are truncated if they exceed `length`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.padEnd('abc', 6); + * // => 'abc ' + * + * _.padEnd('abc', 6, '_-'); + * // => 'abc_-_' + * + * _.padEnd('abc', 3); + * // => 'abc' + */ + function padEnd(string, length, chars) { + string = toString(string); + length = toInteger(length); + + var strLength = length ? 
stringSize(string) : 0; + return (length && strLength < length) + ? (string + createPadding(length - strLength, chars)) + : string; + } + + /** + * Pads `string` on the left side if it's shorter than `length`. Padding + * characters are truncated if they exceed `length`. + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to pad. + * @param {number} [length=0] The padding length. + * @param {string} [chars=' '] The string used as padding. + * @returns {string} Returns the padded string. + * @example + * + * _.padStart('abc', 6); + * // => ' abc' + * + * _.padStart('abc', 6, '_-'); + * // => '_-_abc' + * + * _.padStart('abc', 3); + * // => 'abc' + */ + function padStart(string, length, chars) { + string = toString(string); + length = toInteger(length); + + var strLength = length ? stringSize(string) : 0; + return (length && strLength < length) + ? (createPadding(length - strLength, chars) + string) + : string; + } + + /** + * Converts `string` to an integer of the specified radix. If `radix` is + * `undefined` or `0`, a `radix` of `10` is used unless `value` is a + * hexadecimal, in which case a `radix` of `16` is used. + * + * **Note:** This method aligns with the + * [ES5 implementation](https://es5.github.io/#x15.1.2.2) of `parseInt`. + * + * @static + * @memberOf _ + * @since 1.1.0 + * @category String + * @param {string} string The string to convert. + * @param {number} [radix=10] The radix to interpret `value` by. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {number} Returns the converted integer. + * @example + * + * _.parseInt('08'); + * // => 8 + * + * _.map(['6', '08', '10'], _.parseInt); + * // => [6, 8, 10] + */ + function parseInt(string, radix, guard) { + if (guard || radix == null) { + radix = 0; + } else if (radix) { + radix = +radix; + } + return nativeParseInt(toString(string).replace(reTrimStart, ''), radix || 0); + } + + /** + * Repeats the given string `n` times. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to repeat. + * @param {number} [n=1] The number of times to repeat the string. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {string} Returns the repeated string. + * @example + * + * _.repeat('*', 3); + * // => '***' + * + * _.repeat('abc', 2); + * // => 'abcabc' + * + * _.repeat('abc', 0); + * // => '' + */ + function repeat(string, n, guard) { + if ((guard ? isIterateeCall(string, n, guard) : n === undefined)) { + n = 1; + } else { + n = toInteger(n); + } + return baseRepeat(toString(string), n); + } + + /** + * Replaces matches for `pattern` in `string` with `replacement`. + * + * **Note:** This method is based on + * [`String#replace`](https://mdn.io/String/replace). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to modify. + * @param {RegExp|string} pattern The pattern to replace. + * @param {Function|string} replacement The match replacement. + * @returns {string} Returns the modified string. + * @example + * + * _.replace('Hi Fred', 'Fred', 'Barney'); + * // => 'Hi Barney' + */ + function replace() { + var args = arguments, + string = toString(args[0]); + + return args.length < 3 ? string : string.replace(args[1], args[2]); + } + + /** + * Converts `string` to + * [snake case](https://en.wikipedia.org/wiki/Snake_case). 
+ * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the snake cased string. + * @example + * + * _.snakeCase('Foo Bar'); + * // => 'foo_bar' + * + * _.snakeCase('fooBar'); + * // => 'foo_bar' + * + * _.snakeCase('--FOO-BAR--'); + * // => 'foo_bar' + */ + var snakeCase = createCompounder(function(result, word, index) { + return result + (index ? '_' : '') + word.toLowerCase(); + }); + + /** + * Splits `string` by `separator`. + * + * **Note:** This method is based on + * [`String#split`](https://mdn.io/String/split). + * + * @static + * @memberOf _ + * @since 4.0.0 + * @category String + * @param {string} [string=''] The string to split. + * @param {RegExp|string} separator The separator pattern to split by. + * @param {number} [limit] The length to truncate results to. + * @returns {Array} Returns the string segments. + * @example + * + * _.split('a-b-c', '-', 2); + * // => ['a', 'b'] + */ + function split(string, separator, limit) { + if (limit && typeof limit != 'number' && isIterateeCall(string, separator, limit)) { + separator = limit = undefined; + } + limit = limit === undefined ? MAX_ARRAY_LENGTH : limit >>> 0; + if (!limit) { + return []; + } + string = toString(string); + if (string && ( + typeof separator == 'string' || + (separator != null && !isRegExp(separator)) + )) { + separator = baseToString(separator); + if (!separator && hasUnicode(string)) { + return castSlice(stringToArray(string), 0, limit); + } + } + return string.split(separator, limit); + } + + /** + * Converts `string` to + * [start case](https://en.wikipedia.org/wiki/Letter_case#Stylistic_or_specialised_usage). + * + * @static + * @memberOf _ + * @since 3.1.0 + * @category String + * @param {string} [string=''] The string to convert. + * @returns {string} Returns the start cased string. + * @example + * + * _.startCase('--foo-bar--'); + * // => 'Foo Bar' + * + * _.startCase('fooBar'); + * // => 'Foo Bar' + * + * _.startCase('__FOO_BAR__'); + * // => 'FOO BAR' + */ + var startCase = createCompounder(function(result, word, index) { + return result + (index ? ' ' : '') + upperFirst(word); + }); + + /** + * Checks if `string` starts with the given target string. + * + * @static + * @memberOf _ + * @since 3.0.0 + * @category String + * @param {string} [string=''] The string to inspect. + * @param {string} [target] The string to search for. + * @param {number} [position=0] The position to search from. + * @returns {boolean} Returns `true` if `string` starts with `target`, + * else `false`. + * @example + * + * _.startsWith('abc', 'a'); + * // => true + * + * _.startsWith('abc', 'b'); + * // => false + * + * _.startsWith('abc', 'b', 1); + * // => true + */ + function startsWith(string, target, position) { + string = toString(string); + position = position == null + ? 0 + : baseClamp(toInteger(position), 0, string.length); + + target = baseToString(target); + return string.slice(position, position + target.length) == target; + } + + /** + * Creates a compiled template function that can interpolate data properties + * in "interpolate" delimiters, HTML-escape interpolated data properties in + * "escape" delimiters, and execute JavaScript in "evaluate" delimiters. Data + * properties may be accessed as free variables in the template. If a setting + * object is given, it takes precedence over `_.templateSettings` values. 
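+   * By default the "interpolate" delimiter is `<%= %>`, the HTML "escape"
+   * delimiter is `<%- %>`, and the "evaluate" delimiter is `<% %>`; each
+   * can be overridden per call via the corresponding `options` property.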
+ * + * **Note:** In the development build `_.template` utilizes + * [sourceURLs](http://www.html5rocks.com/en/tutorials/developertools/sourcemaps/#toc-sourceurl) + * for easier debugging. + * + * For more information on precompiling templates see + * [lodash's custom builds documentation](https://lodash.com/custom-builds). + * + * For more information on Chrome extension sandboxes see + * [Chrome's extensions documentation](https://developer.chrome.com/extensions/sandboxingEval). + * + * @static + * @since 0.1.0 + * @memberOf _ + * @category String + * @param {string} [string=''] The template string. + * @param {Object} [options={}] The options object. + * @param {RegExp} [options.escape=_.templateSettings.escape] + * The HTML "escape" delimiter. + * @param {RegExp} [options.evaluate=_.templateSettings.evaluate] + * The "evaluate" delimiter. + * @param {Object} [options.imports=_.templateSettings.imports] + * An object to import into the template as free variables. + * @param {RegExp} [options.interpolate=_.templateSettings.interpolate] + * The "interpolate" delimiter. + * @param {string} [options.sourceURL='lodash.templateSources[n]'] + * The sourceURL of the compiled template. + * @param {string} [options.variable='obj'] + * The data object variable name. + * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. + * @returns {Function} Returns the compiled template function. + * @example + * + * // Use the "interpolate" delimiter to create a compiled template. + * var compiled = _.template('hello <%= user %>!'); + * compiled({ 'user': 'fred' }); + * // => 'hello fred!' + * + * // Use the HTML "escape" delimiter to escape data property values. + * var compiled = _.template('<%- value %>'); + * compiled({ 'value': '