From 1bfb14d12411f6872d897f96865ae0c9cebe2fe3 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 30 Dec 2024 08:49:33 +0000 Subject: [PATCH] chore: release v1.14.0 base revision: 886c9492a8998532071ff2f0a949e36790de0fc1 --- apply/action.yaml | 4 +- check-terraform-skip/action.yaml | 2 +- conftest/action.yaml | 2 +- create-follow-up-pr/action.yaml | 4 +- create-scaffold-module-pr/action.yaml | 2 +- create-scaffold-pr/action.yaml | 4 +- export-aws-secrets-manager/action.yaml | 2 +- export-secrets/action.yaml | 2 +- generate-config-out/action.yaml | 2 +- get-global-config/action.yaml | 2 +- get-target-config/action.yaml | 2 +- js/dist/LICENSE | 11868 +++ js/dist/index.js | 76190 ++++++++++++++++++ list-changed-modules/action.yaml | 2 +- list-module-callers/action.yaml | 2 +- list-targets-with-changed-files/action.yaml | 2 +- list-targets/action.yaml | 10 +- list-working-dirs/action.yaml | 2 +- plan/action.yaml | 4 +- scaffold-module/action.yaml | 2 +- scaffold-tfmigrate/action.yaml | 4 +- scaffold-working-dir/action.yaml | 4 +- set-drift-env/action.yaml | 2 +- setup/action.yaml | 10 +- terraform-apply/action.yaml | 6 +- terraform-plan/action.yaml | 8 +- test-module/action.yaml | 6 +- test/action.yaml | 8 +- tfmigrate-apply/action.yaml | 4 +- tfmigrate-plan/action.yaml | 6 +- update-drift-issue/action.yaml | 2 +- 31 files changed, 88114 insertions(+), 56 deletions(-) create mode 100644 js/dist/LICENSE create mode 100644 js/dist/index.js diff --git a/apply/action.yaml b/apply/action.yaml index f96ad179d..87dc19258 100644 --- a/apply/action.yaml +++ b/apply/action.yaml @@ -16,7 +16,7 @@ inputs: runs: using: composite steps: - - uses: suzuki-shunsuke/tfaction/terraform-apply@main + - uses: suzuki-shunsuke/tfaction/terraform-apply@v1.14.0 if: env.TFACTION_JOB_TYPE == 'terraform' with: # pull-requests:write - Create pull request comments and labels @@ -24,7 +24,7 @@ runs: # contents:write - Update pull request branches github_token: ${{inputs.github_token}} - - uses: suzuki-shunsuke/tfaction/tfmigrate-apply@main + - uses: suzuki-shunsuke/tfaction/tfmigrate-apply@v1.14.0 if: env.TFACTION_JOB_TYPE == 'tfmigrate' with: # pull-requests:write - Create pull request comments diff --git a/check-terraform-skip/action.yaml b/check-terraform-skip/action.yaml index d7d468e3b..591dcfbf2 100644 --- a/check-terraform-skip/action.yaml +++ b/check-terraform-skip/action.yaml @@ -20,7 +20,7 @@ runs: - run: | echo '::warning::This action was deprecated. Please use suzuki-shunsuke/tfaction/js action.' 
shell: bash - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: main with: action: check-terraform-skip diff --git a/conftest/action.yaml b/conftest/action.yaml index c119ab7bc..baeee13e8 100644 --- a/conftest/action.yaml +++ b/conftest/action.yaml @@ -18,7 +18,7 @@ runs: - run: echo "value=$GITHUB_ACTION_PATH" >> "$GITHUB_OUTPUT" shell: bash id: action_path - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: main with: action: conftest diff --git a/create-follow-up-pr/action.yaml b/create-follow-up-pr/action.yaml index 7d50610ae..dad710e3c 100644 --- a/create-follow-up-pr/action.yaml +++ b/create-follow-up-pr/action.yaml @@ -13,11 +13,11 @@ inputs: runs: using: composite steps: - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: global-config with: action: get-global-config - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: target-config with: action: get-target-config diff --git a/create-scaffold-module-pr/action.yaml b/create-scaffold-module-pr/action.yaml index 220e51e23..db472d8ec 100644 --- a/create-scaffold-module-pr/action.yaml +++ b/create-scaffold-module-pr/action.yaml @@ -23,7 +23,7 @@ runs: shell: bash if: env.TFACTION_MODULE_PATH == '' - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: global-config with: action: get-global-config diff --git a/create-scaffold-pr/action.yaml b/create-scaffold-pr/action.yaml index aa99d4227..bd813ad89 100644 --- a/create-scaffold-pr/action.yaml +++ b/create-scaffold-pr/action.yaml @@ -11,11 +11,11 @@ inputs: runs: using: composite steps: - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: global-config with: action: get-global-config - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: target-config with: action: get-target-config diff --git a/export-aws-secrets-manager/action.yaml b/export-aws-secrets-manager/action.yaml index 7d8a07cd4..1e51617bd 100644 --- a/export-aws-secrets-manager/action.yaml +++ b/export-aws-secrets-manager/action.yaml @@ -6,7 +6,7 @@ runs: - run: | echo '::warning::This action was deprecated. Please use suzuki-shunsuke/tfaction/js action.' shell: bash - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: main with: action: export-aws-secrets-manager diff --git a/export-secrets/action.yaml b/export-secrets/action.yaml index d7f7c78a3..814d37f94 100644 --- a/export-secrets/action.yaml +++ b/export-secrets/action.yaml @@ -10,7 +10,7 @@ runs: - run: | echo '::warning::This action was deprecated. Please use suzuki-shunsuke/tfaction/js action.' 
shell: bash - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: main with: action: export-secrets diff --git a/generate-config-out/action.yaml b/generate-config-out/action.yaml index e44eecf21..3769bcdb3 100644 --- a/generate-config-out/action.yaml +++ b/generate-config-out/action.yaml @@ -16,7 +16,7 @@ inputs: runs: using: composite steps: - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: target-config with: action: get-target-config diff --git a/get-global-config/action.yaml b/get-global-config/action.yaml index c2b4d290a..9b2c560c1 100644 --- a/get-global-config/action.yaml +++ b/get-global-config/action.yaml @@ -75,7 +75,7 @@ runs: - run: | echo '::warning::This action was deprecated. Please use suzuki-shunsuke/tfaction/js action.' shell: bash - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: global-config with: action: get-global-config diff --git a/get-target-config/action.yaml b/get-target-config/action.yaml index 1e82e5001..69a053a8b 100644 --- a/get-target-config/action.yaml +++ b/get-target-config/action.yaml @@ -69,7 +69,7 @@ runs: - run: | echo '::warning::This action was deprecated. Please use suzuki-shunsuke/tfaction/js action.' shell: bash - - uses: suzuki-shunsuke/tfaction/js@main + - uses: suzuki-shunsuke/tfaction/js@v1.14.0 id: target-config with: action: get-target-config diff --git a/js/dist/LICENSE b/js/dist/LICENSE new file mode 100644 index 000000000..60c6e4c8a --- /dev/null +++ b/js/dist/LICENSE @@ -0,0 +1,11868 @@ +@actions/core +MIT +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@actions/exec +MIT +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@actions/github +MIT +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@actions/http-client +MIT +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +@actions/io +MIT +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@aws-sdk/client-secrets-manager +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. 
+ + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/client-sso +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/client-sso-oidc +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/client-sts +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/core +Apache-2.0 + +@aws-sdk/credential-provider-env +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@aws-sdk/credential-provider-http +Apache-2.0 + +@aws-sdk/credential-provider-ini +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/credential-provider-node +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/credential-provider-process +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/credential-provider-sso +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/credential-provider-web-identity +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/middleware-host-header +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + +@aws-sdk/middleware-logger +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/middleware-recursion-detection +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/middleware-user-agent +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@aws-sdk/region-config-resolver +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/token-providers +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@aws-sdk/util-endpoints +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@aws-sdk/util-user-agent-node +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@ewoudenberg/difflib +PSF + +@fastify/busboy +MIT +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. + +@octokit/auth-token +MIT +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@octokit/core +MIT +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@octokit/endpoint +MIT +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@octokit/graphql +MIT +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@octokit/plugin-paginate-rest +MIT +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +@octokit/plugin-rest-endpoint-methods +MIT +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +@octokit/request +MIT +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@octokit/request-error +MIT +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@smithy/config-resolver +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. 
Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/core +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/credential-provider-imds +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/fetch-http-handler +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/hash-node +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/is-array-buffer +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@smithy/middleware-content-length +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/middleware-endpoint +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/middleware-retry +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/middleware-serde +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/middleware-stack +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/node-config-provider +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@smithy/node-http-handler +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/property-provider +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/protocol-http +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/querystring-builder +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/querystring-parser +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/service-error-classification +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + +@smithy/shared-ini-file-loader +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/signature-v4 +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/smithy-client +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/types +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/url-parser +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/util-base64 +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@smithy/util-body-length-node +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-buffer-from +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-config-provider +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-defaults-mode-node +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +@smithy/util-endpoints +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-hex-encoding +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +@smithy/util-middleware +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-retry +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-stream +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-uri-escape +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +@smithy/util-utf8 +Apache-2.0 +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +balanced-match +MIT +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +before-after-hook +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018 Gregor Martynus and other contributors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +brace-expansion +MIT +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +colors +MIT +MIT License + +Original Library + - Copyright (c) Marak Squires + +Additional Functionality + - Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +deprecation +ISC +The ISC License + +Copyright (c) Gregor Martynus and contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +fast-xml-parser +MIT +MIT License + +Copyright (c) 2017 Amit Kumar Gupta + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +glob +ISC +The ISC License + +Copyright (c) 2009-2023 Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +heap +MIT +The MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +js-yaml +MIT +(The MIT License) + +Copyright (C) 2011-2015 by Vitaly Puzrin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ + +json-diff +MIT +The MIT License (MIT) + +Copyright (c) 2015 Andrey Tarantsov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +lru-cache +ISC +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +minimatch +ISC +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +minipass +ISC +The ISC License + +Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+once
+ISC
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+path-scurry
+BlueOak-1.0.0
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules. The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice. If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+***As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim.***
+
+
+strnum
+MIT
+MIT License
+
+Copyright (c) 2021 Natural Intelligence
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +tfaction +MIT +MIT License + +Copyright (c) 2022 Shunsuke Suzuki + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +tmp +MIT +The MIT License (MIT) + +Copyright (c) 2014 KARASZI István + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +tslib +0BSD +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+ +tunnel +MIT +The MIT License (MIT) + +Copyright (c) 2012 Koichi Kobayashi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +undici +MIT +MIT License + +Copyright (c) Matteo Collina and Undici contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +universal-user-agent +ISC +# [ISC License](https://spdx.org/licenses/ISC) + +Copyright (c) 2018, Gregor Martynus (https://github.com/gr2m) + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ + +uuid +MIT +The MIT License (MIT) + +Copyright (c) 2010-2020 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +wrappy +ISC +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +zod +MIT +MIT License + +Copyright (c) 2020 Colin McDonnell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/js/dist/index.js b/js/dist/index.js new file mode 100644 index 000000000..a968bc03c --- /dev/null +++ b/js/dist/index.js @@ -0,0 +1,76190 @@ +/******/ (() => { // webpackBootstrap +/******/ var __webpack_modules__ = ({ + +/***/ 4914: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__nccwpck_require__(857)); +const utils_1 = __nccwpck_require__(302); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return (0, utils_1.toCommandValue)(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return (0, utils_1.toCommandValue)(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 7484: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.platform = exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = exports.markdownSummary = exports.summary = exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = __nccwpck_require__(4914); +const file_command_1 = __nccwpck_require__(4753); +const utils_1 = __nccwpck_require__(302); +const os = __importStar(__nccwpck_require__(857)); +const path = __importStar(__nccwpck_require__(6928)); +const oidc_utils_1 = __nccwpck_require__(5306); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode || (exports.ExitCode = ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = (0, utils_1.toCommandValue)(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + return (0, file_command_1.issueFileCommand)('ENV', (0, file_command_1.prepareKeyValueMessage)(name, val)); + } + (0, command_1.issueCommand)('set-env', { name }, convertedVal); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + (0, command_1.issueCommand)('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + (0, file_command_1.issueFileCommand)('PATH', inputPath); + } + else { + (0, command_1.issueCommand)('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map(input => input.trim()); +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + const filePath = process.env['GITHUB_OUTPUT'] || ''; + if (filePath) { + return (0, file_command_1.issueFileCommand)('OUTPUT', (0, file_command_1.prepareKeyValueMessage)(name, value)); + } + process.stdout.write(os.EOL); + (0, command_1.issueCommand)('set-output', { name }, (0, utils_1.toCommandValue)(value)); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + (0, command_1.issue)('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + (0, command_1.issueCommand)('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + (0, command_1.issueCommand)('error', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + (0, command_1.issueCommand)('warning', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + (0, command_1.issueCommand)('notice', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + (0, command_1.issue)('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. 
+ */ +function endGroup() { + (0, command_1.issue)('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + const filePath = process.env['GITHUB_STATE'] || ''; + if (filePath) { + return (0, file_command_1.issueFileCommand)('STATE', (0, file_command_1.prepareKeyValueMessage)(name, value)); + } + (0, command_1.issueCommand)('save-state', { name }, (0, utils_1.toCommandValue)(value)); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +/** + * Summary exports + */ +var summary_1 = __nccwpck_require__(1847); +Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); +/** + * @deprecated use core.summary + */ +var summary_2 = __nccwpck_require__(1847); +Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); +/** + * Path exports + */ +var path_utils_1 = __nccwpck_require__(1976); +Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); +Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); +Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); +/** + * Platform utilities exports + */ +exports.platform = __importStar(__nccwpck_require__(8968)); +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 4753: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const crypto = __importStar(__nccwpck_require__(6982)); +const fs = __importStar(__nccwpck_require__(9896)); +const os = __importStar(__nccwpck_require__(857)); +const utils_1 = __nccwpck_require__(302); +function issueFileCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueFileCommand = issueFileCommand; +function prepareKeyValueMessage(key, value) { + const delimiter = `ghadelimiter_${crypto.randomUUID()}`; + const convertedValue = (0, utils_1.toCommandValue)(value); + // These should realistically never happen, but just in case someone finds a + // way to exploit uuid generation let's not allow keys or values that contain + // the delimiter. + if (key.includes(delimiter)) { + throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); + } + if (convertedValue.includes(delimiter)) { + throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); + } + return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; +} +exports.prepareKeyValueMessage = prepareKeyValueMessage; +//# sourceMappingURL=file-command.js.map + +/***/ }), + +/***/ 5306: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OidcClient = void 0; +const http_client_1 = __nccwpck_require__(4844); +const auth_1 = __nccwpck_require__(4552); +const core_1 = __nccwpck_require__(7484); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + (0, core_1.debug)(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + (0, core_1.setSecret)(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map + +/***/ }), + +/***/ 1976: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; +const path = __importStar(__nccwpck_require__(6928)); +/** + * toPosixPath converts the given path to the posix form. On Windows, \\ will be + * replaced with /. + * + * @param pth. Path to transform. + * @return string Posix path. + */ +function toPosixPath(pth) { + return pth.replace(/[\\]/g, '/'); +} +exports.toPosixPath = toPosixPath; +/** + * toWin32Path converts the given path to the win32 form. On Linux, / will be + * replaced with \\. + * + * @param pth. Path to transform. + * @return string Win32 path. + */ +function toWin32Path(pth) { + return pth.replace(/[/]/g, '\\'); +} +exports.toWin32Path = toWin32Path; +/** + * toPlatformPath converts the given path to a platform-specific path. It does + * this by replacing instances of / and \ with the platform-specific path + * separator. + * + * @param pth The path to platformize. + * @return string The platform-specific path. + */ +function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path.sep); +} +exports.toPlatformPath = toPlatformPath; +//# sourceMappingURL=path-utils.js.map + +/***/ }), + +/***/ 8968: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getDetails = exports.isLinux = exports.isMacOS = exports.isWindows = exports.arch = exports.platform = void 0; +const os_1 = __importDefault(__nccwpck_require__(857)); +const exec = __importStar(__nccwpck_require__(5236)); +const getWindowsInfo = () => __awaiter(void 0, void 0, void 0, function* () { + const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', undefined, { + silent: true + }); + const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', undefined, { + silent: true + }); + return { + name: name.trim(), + version: version.trim() + }; +}); +const getMacOsInfo = () => __awaiter(void 0, void 0, void 0, function* () { + var _a, _b, _c, _d; + const { stdout } = yield exec.getExecOutput('sw_vers', undefined, { + silent: true + }); + const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : ''; + const name = (_d = (_c = stdout.match(/ProductName:\s*(.+)/)) === null || _c === void 0 ? void 0 : _c[1]) !== null && _d !== void 0 ? _d : ''; + return { + name, + version + }; +}); +const getLinuxInfo = () => __awaiter(void 0, void 0, void 0, function* () { + const { stdout } = yield exec.getExecOutput('lsb_release', ['-i', '-r', '-s'], { + silent: true + }); + const [name, version] = stdout.trim().split('\n'); + return { + name, + version + }; +}); +exports.platform = os_1.default.platform(); +exports.arch = os_1.default.arch(); +exports.isWindows = exports.platform === 'win32'; +exports.isMacOS = exports.platform === 'darwin'; +exports.isLinux = exports.platform === 'linux'; +function getDetails() { + return __awaiter(this, void 0, void 0, function* () { + return Object.assign(Object.assign({}, (yield (exports.isWindows + ? getWindowsInfo() + : exports.isMacOS + ? getMacOsInfo() + : getLinuxInfo()))), { platform: exports.platform, + arch: exports.arch, + isWindows: exports.isWindows, + isMacOS: exports.isMacOS, + isLinux: exports.isLinux }); + }); +} +exports.getDetails = getDetails; +//# sourceMappingURL=platform.js.map + +/***/ }), + +/***/ 1847: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; +const os_1 = __nccwpck_require__(857); +const fs_1 = __nccwpck_require__(9896); +const { access, appendFile, writeFile } = fs_1.promises; +exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; +exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; +class Summary { + constructor() { + this._buffer = ''; + } + /** + * Finds the summary file path from the environment, rejects if env var is not found or file does not exist + * Also checks r/w permissions. + * + * @returns step summary file path + */ + filePath() { + return __awaiter(this, void 0, void 0, function* () { + if (this._filePath) { + return this._filePath; + } + const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; + if (!pathFromEnv) { + throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); + } + try { + yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); + } + catch (_a) { + throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); + } + this._filePath = pathFromEnv; + return this._filePath; + }); + } + /** + * Wraps content in an HTML tag, adding any HTML attributes + * + * @param {string} tag HTML tag to wrap + * @param {string | null} content content within the tag + * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add + * + * @returns {string} content wrapped in HTML element + */ + wrap(tag, content, attrs = {}) { + const htmlAttrs = Object.entries(attrs) + .map(([key, value]) => ` ${key}="${value}"`) + .join(''); + if (!content) { + return `<${tag}${htmlAttrs}>`; + } + return `<${tag}${htmlAttrs}>${content}</${tag}>`; + } + /** + * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. + * + * @param {SummaryWriteOptions} [options] (optional) options for write operation + * + * @returns {Promise<Summary>} summary instance + */ + write(options) { + return __awaiter(this, void 0, void 0, function* () { + const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); + const filePath = yield this.filePath(); + const writeFunc = overwrite ?
writeFile : appendFile; + yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); + return this.emptyBuffer(); + }); + } + /** + * Clears the summary buffer and wipes the summary file + * + * @returns {Summary} summary instance + */ + clear() { + return __awaiter(this, void 0, void 0, function* () { + return this.emptyBuffer().write({ overwrite: true }); + }); + } + /** + * Returns the current summary buffer as a string + * + * @returns {string} string of summary buffer + */ + stringify() { + return this._buffer; + } + /** + * If the summary buffer is empty + * + * @returns {boolen} true if the buffer is empty + */ + isEmptyBuffer() { + return this._buffer.length === 0; + } + /** + * Resets the summary buffer without writing to summary file + * + * @returns {Summary} summary instance + */ + emptyBuffer() { + this._buffer = ''; + return this; + } + /** + * Adds raw text to the summary buffer + * + * @param {string} text content to add + * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) + * + * @returns {Summary} summary instance + */ + addRaw(text, addEOL = false) { + this._buffer += text; + return addEOL ? this.addEOL() : this; + } + /** + * Adds the operating system-specific end-of-line marker to the buffer + * + * @returns {Summary} summary instance + */ + addEOL() { + return this.addRaw(os_1.EOL); + } + /** + * Adds an HTML codeblock to the summary buffer + * + * @param {string} code content to render within fenced code block + * @param {string} lang (optional) language to syntax highlight code + * + * @returns {Summary} summary instance + */ + addCodeBlock(code, lang) { + const attrs = Object.assign({}, (lang && { lang })); + const element = this.wrap('pre', this.wrap('code', code), attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML list to the summary buffer + * + * @param {string[]} items list of items to render + * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) + * + * @returns {Summary} summary instance + */ + addList(items, ordered = false) { + const tag = ordered ? 'ol' : 'ul'; + const listItems = items.map(item => this.wrap('li', item)).join(''); + const element = this.wrap(tag, listItems); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML table to the summary buffer + * + * @param {SummaryTableCell[]} rows table rows + * + * @returns {Summary} summary instance + */ + addTable(rows) { + const tableBody = rows + .map(row => { + const cells = row + .map(cell => { + if (typeof cell === 'string') { + return this.wrap('td', cell); + } + const { header, data, colspan, rowspan } = cell; + const tag = header ? 
'th' : 'td'; + const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); + return this.wrap(tag, data, attrs); + }) + .join(''); + return this.wrap('tr', cells); + }) + .join(''); + const element = this.wrap('table', tableBody); + return this.addRaw(element).addEOL(); + } + /** + * Adds a collapsable HTML details element to the summary buffer + * + * @param {string} label text for the closed state + * @param {string} content collapsable content + * + * @returns {Summary} summary instance + */ + addDetails(label, content) { + const element = this.wrap('details', this.wrap('summary', label) + content); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML image tag to the summary buffer + * + * @param {string} src path to the image you to embed + * @param {string} alt text description of the image + * @param {SummaryImageOptions} options (optional) addition image attributes + * + * @returns {Summary} summary instance + */ + addImage(src, alt, options) { + const { width, height } = options || {}; + const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); + const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML section heading element + * + * @param {string} text heading text + * @param {number | string} [level=1] (optional) the heading level, default: 1 + * + * @returns {Summary} summary instance + */ + addHeading(text, level) { + const tag = `h${level}`; + const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) + ? tag + : 'h1'; + const element = this.wrap(allowedTag, text); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML thematic break (
<hr>) to the summary buffer + * + * @returns {Summary} summary instance + */ + addSeparator() { + const element = this.wrap('hr', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML line break (<br>
) to the summary buffer + * + * @returns {Summary} summary instance + */ + addBreak() { + const element = this.wrap('br', null); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML blockquote to the summary buffer + * + * @param {string} text quote text + * @param {string} cite (optional) citation url + * + * @returns {Summary} summary instance + */ + addQuote(text, cite) { + const attrs = Object.assign({}, (cite && { cite })); + const element = this.wrap('blockquote', text, attrs); + return this.addRaw(element).addEOL(); + } + /** + * Adds an HTML anchor tag to the summary buffer + * + * @param {string} text link text/content + * @param {string} href hyperlink + * + * @returns {Summary} summary instance + */ + addLink(text, href) { + const element = this.wrap('a', text, { href }); + return this.addRaw(element).addEOL(); + } +} +const _summary = new Summary(); +/** + * @deprecated use `core.summary` + */ +exports.markdownSummary = _summary; +exports.summary = _summary; +//# sourceMappingURL=summary.js.map + +/***/ }), + +/***/ 302: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 5236: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getExecOutput = exports.exec = void 0; +const string_decoder_1 = __nccwpck_require__(3193); +const tr = __importStar(__nccwpck_require__(6665)); +/** + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code + */ +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); +} +exports.exec = exec; +/** + * Exec a command and get the output. + * Output will be streamed to the live console. + * Returns promise with the exit code and collected stdout and stderr + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code, stdout, and stderr + */ +function getExecOutput(commandLine, args, options) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + let stdout = ''; + let stderr = ''; + //Using string decoder covers the case where a mult-byte character is split + const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); + const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); + const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? 
void 0 : _b.stderr; + const stdErrListener = (data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); + } + }; + const stdOutListener = (data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); + } + }; + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + //flush any remaining characters + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; + }); +} +exports.getExecOutput = getExecOutput; +//# sourceMappingURL=exec.js.map + +/***/ }), + +/***/ 6665: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.argStringToArray = exports.ToolRunner = void 0; +const os = __importStar(__nccwpck_require__(857)); +const events = __importStar(__nccwpck_require__(4434)); +const child = __importStar(__nccwpck_require__(5317)); +const path = __importStar(__nccwpck_require__(6928)); +const io = __importStar(__nccwpck_require__(4994)); +const ioUtil = __importStar(__nccwpck_require__(5207)); +const timers_1 = __nccwpck_require__(3557); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. 
+ */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + return s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + return ''; + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. 
+ // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. 
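+ // added example for clarity (an illustrative note, not part of the original comment):
+ // applying the quoting rules above, the argument   foo "bar"   is rendered as   "foo ""bar"""
+ // for cmd.exe: the value is wrapped in quotes and every embedded quote is doubled,
+ // including the quote at the very end of the argument.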
+ let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. 
+ return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + let stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + let errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? 
optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); + } + cp.stdin.end(this.options.input); + } + })); + }); + } +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. + if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } +} +//# sourceMappingURL=toolrunner.js.map + +/***/ }), + +/***/ 1648: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Context = void 0; +const fs_1 = __nccwpck_require__(9896); +const os_1 = __nccwpck_require__(857); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + var _a, _b, _c; + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = + (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; + } + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); + } + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; + } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; + } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + } +} +exports.Context = Context; +//# sourceMappingURL=context.js.map + +/***/ }), + +/***/ 3228: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(1648)); +const utils_1 = __nccwpck_require__(8006); +exports.context = new Context.Context(); +/** + * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokit(token, options, ...additionalPlugins) { + const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); + return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); +} +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map + +/***/ }), + +/***/ 5156: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__nccwpck_require__(4844)); +const undici_1 = __nccwpck_require__(6752); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); + } + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); + } + return typeof options.auth === 'string' ? options.auth : `token ${token}`; +} +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); +} +exports.getProxyAgentDispatcher = getProxyAgentDispatcher; +function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }); + return proxyFetch; +} +exports.getProxyFetch = getProxyFetch; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +} +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 8006: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(1648)); +const Utils = __importStar(__nccwpck_require__(5156)); +// octokit + plugins +const core_1 = __nccwpck_require__(1897); +const plugin_rest_endpoint_methods_1 = __nccwpck_require__(4935); +const plugin_paginate_rest_1 = __nccwpck_require__(8082); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +exports.defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); +/** + * Convience function to correctly format Octokit Options to pass into the constructor. + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; +} +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 4552: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map + +/***/ }), + +/***/ 4844: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__nccwpck_require__(8611)); +const https = __importStar(__nccwpck_require__(5692)); +const pm = __importStar(__nccwpck_require__(4988)); +const tunnel = __importStar(__nccwpck_require__(770)); +const undici_1 = __nccwpck_require__(6752); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes || (exports.HttpCodes = HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + 
Headers["ContentType"] = "content-type"; +})(Headers || (exports.Headers = Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes || (exports.MediaTypes = MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } + readBodyBuffer() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + const chunks = []; + this.message.on('data', (chunk) => { + chunks.push(chunk); + }); + this.message.on('end', () => { + resolve(Buffer.concat(chunks)); + }); + })); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + 
options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); + } + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); + } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, 
MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. 
+ * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + } + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + getAgentDispatcher(serverUrl) { + const parsedUrl = new URL(serverUrl); + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (!useProxy) { + return; + } + return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (!useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if tunneling agent isn't assigned create a new agent + if (!agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _getProxyAgentDispatcher(parsedUrl, proxyUrl) { + let proxyAgent; + if (this._keepAlive) { + proxyAgent = this._proxyAgentDispatcher; + } + // if agent is already assigned use that agent. + if (proxyAgent) { + return proxyAgent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { + token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}` + }))); + this._proxyAgentDispatcher = proxyAgent; + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { + rejectUnauthorized: false + }); + } + return proxyAgent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 4988: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + try { + return new DecodedURL(proxyVar); + } + catch (_a) { + if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) + return new DecodedURL(`http://${proxyVar}`); + } + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const reqHost = reqUrl.hostname; + if (isLoopbackAddress(reqHost)) { + return true; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperNoProxyItem === '*' || + upperReqHosts.some(x => x === upperNoProxyItem || + x.endsWith(`.${upperNoProxyItem}`) || + (upperNoProxyItem.startsWith('.') && + x.endsWith(`${upperNoProxyItem}`)))) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +function isLoopbackAddress(host) { + const hostLower = host.toLowerCase(); + return (hostLower === 'localhost' || + hostLower.startsWith('127.') || + hostLower.startsWith('[::1]') || + hostLower.startsWith('[0:0:0:0:0:0:0:1]')); +} +class DecodedURL extends URL { + constructor(url, base) { + super(url, base); + this._decodedUsername = decodeURIComponent(super.username); + this._decodedPassword = decodeURIComponent(super.password); + } + get username() { + return this._decodedUsername; + } + get password() { + return this._decodedPassword; + } +} +//# sourceMappingURL=proxy.js.map + +/***/ }), + +/***/ 5207: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; +const fs = __importStar(__nccwpck_require__(9896)); +const path = __importStar(__nccwpck_require__(6928)); +_a = fs.promises +// export const {open} = 'fs' +, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +// export const {open} = 'fs' +exports.IS_WINDOWS = process.platform === 'win32'; +// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 +exports.UV_FS_O_EXLOCK = 0x10000000; +exports.READONLY = fs.constants.O_RDONLY; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). 
+ */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. + */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +// Get the path of cmd.exe in windows +function getCmdPath() { + var _a; + return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; +} +exports.getCmdPath = getCmdPath; +//# sourceMappingURL=io-util.js.map + +/***/ }), + +/***/ 4994: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; +const assert_1 = __nccwpck_require__(2613); +const path = __importStar(__nccwpck_require__(6928)); +const ioUtil = __importStar(__nccwpck_require__(5207)); +/** + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive, copySourceDirectory } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() && copySourceDirectory + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. 
+ */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Check for invalid characters + // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); + } + } + try { + // note if path does not exist, error is silent + yield ioUtil.rm(inputPath, { + force: true, + maxRetries: 3, + recursive: true, + retryDelay: 300 + }); + } + catch (err) { + throw new Error(`File was unable to be removed ${err}`); + } + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + yield ioUtil.mkdir(fsPath, { recursive: true }); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + } + } + return result; + } + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ''; + }); +} +exports.which = which; +/** + * Returns a list of all occurrences of the given tool on the system path. 
+ * + * @returns Promise the paths of the tool + */ +function findInPath(tool) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { + for (const extension of process.env['PATHEXT'].split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; + } + return []; + } + // if any path separators, return empty + if (tool.includes(path.sep)) { + return []; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // find all matches + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; + }); +} +exports.findInPath = findInPath; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + const copySourceDirectory = options.copySourceDirectory == null + ? true + : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } + else { + yield copyFile(srcFile, destFile, force); + } + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); +} +//# sourceMappingURL=io.js.map + +/***/ }), + +/***/ 6493: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveHttpAuthSchemeConfig = exports.defaultSecretsManagerHttpAuthSchemeProvider = exports.defaultSecretsManagerHttpAuthSchemeParametersProvider = void 0; +const core_1 = __nccwpck_require__(8704); +const util_middleware_1 = __nccwpck_require__(6324); +const defaultSecretsManagerHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSecretsManagerHttpAuthSchemeParametersProvider = defaultSecretsManagerHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "secretsmanager", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +const defaultSecretsManagerHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSecretsManagerHttpAuthSchemeProvider = defaultSecretsManagerHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return { + ...config_0, + }; +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; + + +/***/ }), + +/***/ 3267: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = __nccwpck_require__(3068); +const util_endpoints_2 = __nccwpck_require__(9674); +const ruleset_1 = __nccwpck_require__(1176); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; + + +/***/ }), + +/***/ 1176: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ruleSet = void 0; +const y = "required", z = "fn", A = "argv", B = "ref", C = "properties", D = "headers"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "stringEquals", i = { [y]: false, "type": "String" }, j = { [y]: true, "default": false, "type": "Boolean" }, k = { [B]: "Endpoint" }, l = { [z]: c, [A]: [{ [B]: "UseFIPS" }, true] }, m = { [z]: c, [A]: [{ [B]: "UseDualStack" }, true] }, n = {}, o = { [z]: "getAttr", [A]: [{ [B]: g }, 
"supportsFIPS"] }, p = { [z]: c, [A]: [true, { [z]: "getAttr", [A]: [{ [B]: g }, "supportsDualStack"] }] }, q = { [z]: "getAttr", [A]: [{ [B]: g }, "name"] }, r = { "url": "https://secretsmanager-fips.{Region}.amazonaws.com", [C]: {}, [D]: {} }, s = { "url": "https://secretsmanager.{Region}.amazonaws.com", [C]: {}, [D]: {} }, t = [l], u = [m], v = [{ [B]: "Region" }], w = [{ [z]: h, [A]: ["aws", q] }], x = [{ [z]: h, [A]: ["aws-us-gov", q] }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [z]: b, [A]: [k] }], rules: [{ conditions: t, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: u, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, [C]: n, [D]: n }, type: e }], type: f }, { conditions: [{ [z]: b, [A]: v }], rules: [{ conditions: [{ [z]: "aws.partition", [A]: v, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [z]: c, [A]: [a, o] }, p], rules: [{ conditions: w, endpoint: r, type: e }, { conditions: x, endpoint: r, type: e }, { endpoint: { url: "https://secretsmanager-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", [C]: n, [D]: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: t, rules: [{ conditions: [{ [z]: c, [A]: [o, a] }], rules: [{ endpoint: { url: "https://secretsmanager-fips.{Region}.{PartitionResult#dnsSuffix}", [C]: n, [D]: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: u, rules: [{ conditions: [p], rules: [{ conditions: w, endpoint: s, type: e }, { conditions: [{ [z]: h, [A]: ["aws-cn", q] }], endpoint: { url: "https://secretsmanager.{Region}.amazonaws.com.cn", [C]: n, [D]: n }, type: e }, { conditions: x, endpoint: s, type: e }, { endpoint: { url: "https://secretsmanager.{Region}.{PartitionResult#dualStackDnsSuffix}", [C]: n, [D]: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://secretsmanager.{Region}.{PartitionResult#dnsSuffix}", [C]: n, [D]: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; + + +/***/ }), + +/***/ 2994: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + BatchGetSecretValueCommand: () => 
BatchGetSecretValueCommand, + BatchGetSecretValueResponseFilterSensitiveLog: () => BatchGetSecretValueResponseFilterSensitiveLog, + CancelRotateSecretCommand: () => CancelRotateSecretCommand, + CreateSecretCommand: () => CreateSecretCommand, + CreateSecretRequestFilterSensitiveLog: () => CreateSecretRequestFilterSensitiveLog, + DecryptionFailure: () => DecryptionFailure, + DeleteResourcePolicyCommand: () => DeleteResourcePolicyCommand, + DeleteSecretCommand: () => DeleteSecretCommand, + DescribeSecretCommand: () => DescribeSecretCommand, + EncryptionFailure: () => EncryptionFailure, + FilterNameStringType: () => FilterNameStringType, + GetRandomPasswordCommand: () => GetRandomPasswordCommand, + GetRandomPasswordResponseFilterSensitiveLog: () => GetRandomPasswordResponseFilterSensitiveLog, + GetResourcePolicyCommand: () => GetResourcePolicyCommand, + GetSecretValueCommand: () => GetSecretValueCommand, + GetSecretValueResponseFilterSensitiveLog: () => GetSecretValueResponseFilterSensitiveLog, + InternalServiceError: () => InternalServiceError, + InvalidNextTokenException: () => InvalidNextTokenException, + InvalidParameterException: () => InvalidParameterException, + InvalidRequestException: () => InvalidRequestException, + LimitExceededException: () => LimitExceededException, + ListSecretVersionIdsCommand: () => ListSecretVersionIdsCommand, + ListSecretsCommand: () => ListSecretsCommand, + MalformedPolicyDocumentException: () => MalformedPolicyDocumentException, + PreconditionNotMetException: () => PreconditionNotMetException, + PublicPolicyException: () => PublicPolicyException, + PutResourcePolicyCommand: () => PutResourcePolicyCommand, + PutSecretValueCommand: () => PutSecretValueCommand, + PutSecretValueRequestFilterSensitiveLog: () => PutSecretValueRequestFilterSensitiveLog, + RemoveRegionsFromReplicationCommand: () => RemoveRegionsFromReplicationCommand, + ReplicateSecretToRegionsCommand: () => ReplicateSecretToRegionsCommand, + ResourceExistsException: () => ResourceExistsException, + ResourceNotFoundException: () => ResourceNotFoundException, + RestoreSecretCommand: () => RestoreSecretCommand, + RotateSecretCommand: () => RotateSecretCommand, + SecretValueEntryFilterSensitiveLog: () => SecretValueEntryFilterSensitiveLog, + SecretsManager: () => SecretsManager, + SecretsManagerClient: () => SecretsManagerClient, + SecretsManagerServiceException: () => SecretsManagerServiceException, + SortOrderType: () => SortOrderType, + StatusType: () => StatusType, + StopReplicationToReplicaCommand: () => StopReplicationToReplicaCommand, + TagResourceCommand: () => TagResourceCommand, + UntagResourceCommand: () => UntagResourceCommand, + UpdateSecretCommand: () => UpdateSecretCommand, + UpdateSecretRequestFilterSensitiveLog: () => UpdateSecretRequestFilterSensitiveLog, + UpdateSecretVersionStageCommand: () => UpdateSecretVersionStageCommand, + ValidateResourcePolicyCommand: () => ValidateResourcePolicyCommand, + __Client: () => import_smithy_client.Client, + paginateBatchGetSecretValue: () => paginateBatchGetSecretValue, + paginateListSecretVersionIds: () => paginateListSecretVersionIds, + paginateListSecrets: () => paginateListSecrets +}); +module.exports = __toCommonJS(src_exports); + +// src/SecretsManagerClient.ts +var import_middleware_host_header = __nccwpck_require__(2590); +var import_middleware_logger = __nccwpck_require__(5242); +var import_middleware_recursion_detection = __nccwpck_require__(1568); +var import_middleware_user_agent = __nccwpck_require__(2959); +var 
import_config_resolver = __nccwpck_require__(9316); +var import_core = __nccwpck_require__(402); +var import_middleware_content_length = __nccwpck_require__(7212); +var import_middleware_endpoint = __nccwpck_require__(99); +var import_middleware_retry = __nccwpck_require__(9618); + +var import_httpAuthSchemeProvider = __nccwpck_require__(6493); + +// src/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return { + ...options, + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + defaultSigningName: "secretsmanager" + }; +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/SecretsManagerClient.ts +var import_runtimeConfig = __nccwpck_require__(2580); + +// src/runtimeExtensions.ts +var import_region_config_resolver = __nccwpck_require__(6463); +var import_protocol_http = __nccwpck_require__(2356); +var import_smithy_client = __nccwpck_require__(1411); + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var asPartial = /* @__PURE__ */ __name((t) => t, "asPartial"); +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = { + ...asPartial((0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig)), + ...asPartial((0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig)), + ...asPartial((0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig)), + ...asPartial(getHttpAuthExtensionConfiguration(runtimeConfig)) + }; + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return { + ...runtimeConfig, + ...(0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + ...(0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + ...(0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + ...resolveHttpAuthRuntimeConfig(extensionConfiguration) + }; +}, 
"resolveRuntimeExtensions"); + +// src/SecretsManagerClient.ts +var _SecretsManagerClient = class _SecretsManagerClient extends import_smithy_client.Client { + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, (configuration == null ? void 0 : configuration.extensions) || []); + super(_config_8); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSecretsManagerHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }) + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. 
+ */ + destroy() { + super.destroy(); + } +}; +__name(_SecretsManagerClient, "SecretsManagerClient"); +var SecretsManagerClient = _SecretsManagerClient; + +// src/SecretsManager.ts + + +// src/commands/BatchGetSecretValueCommand.ts + +var import_middleware_serde = __nccwpck_require__(3255); + + +// src/models/models_0.ts + + +// src/models/SecretsManagerServiceException.ts + +var _SecretsManagerServiceException = class _SecretsManagerServiceException extends import_smithy_client.ServiceException { + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SecretsManagerServiceException.prototype); + } +}; +__name(_SecretsManagerServiceException, "SecretsManagerServiceException"); +var SecretsManagerServiceException = _SecretsManagerServiceException; + +// src/models/models_0.ts +var FilterNameStringType = { + all: "all", + description: "description", + name: "name", + owning_service: "owning-service", + primary_region: "primary-region", + tag_key: "tag-key", + tag_value: "tag-value" +}; +var _DecryptionFailure = class _DecryptionFailure extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "DecryptionFailure", + $fault: "client", + ...opts + }); + this.name = "DecryptionFailure"; + this.$fault = "client"; + Object.setPrototypeOf(this, _DecryptionFailure.prototype); + this.Message = opts.Message; + } +}; +__name(_DecryptionFailure, "DecryptionFailure"); +var DecryptionFailure = _DecryptionFailure; +var _InternalServiceError = class _InternalServiceError extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InternalServiceError", + $fault: "server", + ...opts + }); + this.name = "InternalServiceError"; + this.$fault = "server"; + Object.setPrototypeOf(this, _InternalServiceError.prototype); + this.Message = opts.Message; + } +}; +__name(_InternalServiceError, "InternalServiceError"); +var InternalServiceError = _InternalServiceError; +var _InvalidNextTokenException = class _InvalidNextTokenException extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidNextTokenException", + $fault: "client", + ...opts + }); + this.name = "InvalidNextTokenException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidNextTokenException.prototype); + this.Message = opts.Message; + } +}; +__name(_InvalidNextTokenException, "InvalidNextTokenException"); +var InvalidNextTokenException = _InvalidNextTokenException; +var _InvalidParameterException = class _InvalidParameterException extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidParameterException", + $fault: "client", + ...opts + }); + this.name = "InvalidParameterException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidParameterException.prototype); + this.Message = opts.Message; + } +}; +__name(_InvalidParameterException, "InvalidParameterException"); +var InvalidParameterException = _InvalidParameterException; +var _InvalidRequestException = class _InvalidRequestException extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + this.name = "InvalidRequestException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + this.Message = opts.Message; + } +}; +__name(_InvalidRequestException, 
"InvalidRequestException"); +var InvalidRequestException = _InvalidRequestException; +var _ResourceNotFoundException = class _ResourceNotFoundException extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts + }); + this.name = "ResourceNotFoundException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _ResourceNotFoundException.prototype); + this.Message = opts.Message; + } +}; +__name(_ResourceNotFoundException, "ResourceNotFoundException"); +var ResourceNotFoundException = _ResourceNotFoundException; +var StatusType = { + Failed: "Failed", + InProgress: "InProgress", + InSync: "InSync" +}; +var _EncryptionFailure = class _EncryptionFailure extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "EncryptionFailure", + $fault: "client", + ...opts + }); + this.name = "EncryptionFailure"; + this.$fault = "client"; + Object.setPrototypeOf(this, _EncryptionFailure.prototype); + this.Message = opts.Message; + } +}; +__name(_EncryptionFailure, "EncryptionFailure"); +var EncryptionFailure = _EncryptionFailure; +var _LimitExceededException = class _LimitExceededException extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "LimitExceededException", + $fault: "client", + ...opts + }); + this.name = "LimitExceededException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _LimitExceededException.prototype); + this.Message = opts.Message; + } +}; +__name(_LimitExceededException, "LimitExceededException"); +var LimitExceededException = _LimitExceededException; +var _MalformedPolicyDocumentException = class _MalformedPolicyDocumentException extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts + }); + this.name = "MalformedPolicyDocumentException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _MalformedPolicyDocumentException.prototype); + this.Message = opts.Message; + } +}; +__name(_MalformedPolicyDocumentException, "MalformedPolicyDocumentException"); +var MalformedPolicyDocumentException = _MalformedPolicyDocumentException; +var _PreconditionNotMetException = class _PreconditionNotMetException extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "PreconditionNotMetException", + $fault: "client", + ...opts + }); + this.name = "PreconditionNotMetException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _PreconditionNotMetException.prototype); + this.Message = opts.Message; + } +}; +__name(_PreconditionNotMetException, "PreconditionNotMetException"); +var PreconditionNotMetException = _PreconditionNotMetException; +var _ResourceExistsException = class _ResourceExistsException extends SecretsManagerServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceExistsException", + $fault: "client", + ...opts + }); + this.name = "ResourceExistsException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _ResourceExistsException.prototype); + this.Message = opts.Message; + } +}; +__name(_ResourceExistsException, "ResourceExistsException"); +var ResourceExistsException = _ResourceExistsException; +var SortOrderType = { + asc: "asc", + desc: "desc" +}; +var _PublicPolicyException = class _PublicPolicyException extends SecretsManagerServiceException { + /** 
+ * @internal + */ + constructor(opts) { + super({ + name: "PublicPolicyException", + $fault: "client", + ...opts + }); + this.name = "PublicPolicyException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _PublicPolicyException.prototype); + this.Message = opts.Message; + } +}; +__name(_PublicPolicyException, "PublicPolicyException"); +var PublicPolicyException = _PublicPolicyException; +var SecretValueEntryFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretBinary && { SecretBinary: import_smithy_client.SENSITIVE_STRING }, + ...obj.SecretString && { SecretString: import_smithy_client.SENSITIVE_STRING } +}), "SecretValueEntryFilterSensitiveLog"); +var BatchGetSecretValueResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretValues && { SecretValues: obj.SecretValues.map((item) => SecretValueEntryFilterSensitiveLog(item)) } +}), "BatchGetSecretValueResponseFilterSensitiveLog"); +var CreateSecretRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretBinary && { SecretBinary: import_smithy_client.SENSITIVE_STRING }, + ...obj.SecretString && { SecretString: import_smithy_client.SENSITIVE_STRING } +}), "CreateSecretRequestFilterSensitiveLog"); +var GetRandomPasswordResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.RandomPassword && { RandomPassword: import_smithy_client.SENSITIVE_STRING } +}), "GetRandomPasswordResponseFilterSensitiveLog"); +var GetSecretValueResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretBinary && { SecretBinary: import_smithy_client.SENSITIVE_STRING }, + ...obj.SecretString && { SecretString: import_smithy_client.SENSITIVE_STRING } +}), "GetSecretValueResponseFilterSensitiveLog"); +var PutSecretValueRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretBinary && { SecretBinary: import_smithy_client.SENSITIVE_STRING }, + ...obj.SecretString && { SecretString: import_smithy_client.SENSITIVE_STRING }, + ...obj.RotationToken && { RotationToken: import_smithy_client.SENSITIVE_STRING } +}), "PutSecretValueRequestFilterSensitiveLog"); +var UpdateSecretRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretBinary && { SecretBinary: import_smithy_client.SENSITIVE_STRING }, + ...obj.SecretString && { SecretString: import_smithy_client.SENSITIVE_STRING } +}), "UpdateSecretRequestFilterSensitiveLog"); + +// src/protocols/Aws_json1_1.ts +var import_core2 = __nccwpck_require__(8704); + + +var import_uuid = __nccwpck_require__(2048); +var se_BatchGetSecretValueCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("BatchGetSecretValue"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_BatchGetSecretValueCommand"); +var se_CancelRotateSecretCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("CancelRotateSecret"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_CancelRotateSecretCommand"); +var se_CreateSecretCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("CreateSecret"); + let body; + body = JSON.stringify(se_CreateSecretRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, 
"se_CreateSecretCommand"); +var se_DeleteResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteResourcePolicy"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteResourcePolicyCommand"); +var se_DeleteSecretCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DeleteSecret"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DeleteSecretCommand"); +var se_DescribeSecretCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("DescribeSecret"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DescribeSecretCommand"); +var se_GetRandomPasswordCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("GetRandomPassword"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetRandomPasswordCommand"); +var se_GetResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("GetResourcePolicy"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetResourcePolicyCommand"); +var se_GetSecretValueCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("GetSecretValue"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetSecretValueCommand"); +var se_ListSecretsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListSecrets"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListSecretsCommand"); +var se_ListSecretVersionIdsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ListSecretVersionIds"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ListSecretVersionIdsCommand"); +var se_PutResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("PutResourcePolicy"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_PutResourcePolicyCommand"); +var se_PutSecretValueCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("PutSecretValue"); + let body; + body = JSON.stringify(se_PutSecretValueRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_PutSecretValueCommand"); +var se_RemoveRegionsFromReplicationCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("RemoveRegionsFromReplication"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_RemoveRegionsFromReplicationCommand"); +var 
se_ReplicateSecretToRegionsCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ReplicateSecretToRegions"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ReplicateSecretToRegionsCommand"); +var se_RestoreSecretCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("RestoreSecret"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_RestoreSecretCommand"); +var se_RotateSecretCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("RotateSecret"); + let body; + body = JSON.stringify(se_RotateSecretRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_RotateSecretCommand"); +var se_StopReplicationToReplicaCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("StopReplicationToReplica"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_StopReplicationToReplicaCommand"); +var se_TagResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("TagResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_TagResourceCommand"); +var se_UntagResourceCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UntagResource"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UntagResourceCommand"); +var se_UpdateSecretCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateSecret"); + let body; + body = JSON.stringify(se_UpdateSecretRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateSecretCommand"); +var se_UpdateSecretVersionStageCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("UpdateSecretVersionStage"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_UpdateSecretVersionStageCommand"); +var se_ValidateResourcePolicyCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = sharedHeaders("ValidateResourcePolicy"); + let body; + body = JSON.stringify((0, import_smithy_client._json)(input)); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_ValidateResourcePolicyCommand"); +var de_BatchGetSecretValueCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_BatchGetSecretValueResponse(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_BatchGetSecretValueCommand"); +var de_CancelRotateSecretCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, 
import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CancelRotateSecretCommand"); +var de_CreateSecretCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_CreateSecretResponse(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_CreateSecretCommand"); +var de_DeleteResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteResourcePolicyCommand"); +var de_DeleteSecretCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DeleteSecretResponse(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DeleteSecretCommand"); +var de_DescribeSecretCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_DescribeSecretResponse(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DescribeSecretCommand"); +var de_GetRandomPasswordCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetRandomPasswordCommand"); +var de_GetResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetResourcePolicyCommand"); +var de_GetSecretValueCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_GetSecretValueResponse(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetSecretValueCommand"); +var de_ListSecretsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return 
de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ListSecretsResponse(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListSecretsCommand"); +var de_ListSecretVersionIdsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ListSecretVersionIdsResponse(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ListSecretVersionIdsCommand"); +var de_PutResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_PutResourcePolicyCommand"); +var de_PutSecretValueCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_PutSecretValueCommand"); +var de_RemoveRegionsFromReplicationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_RemoveRegionsFromReplicationResponse(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_RemoveRegionsFromReplicationCommand"); +var de_ReplicateSecretToRegionsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = de_ReplicateSecretToRegionsResponse(data, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ReplicateSecretToRegionsCommand"); +var de_RestoreSecretCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_RestoreSecretCommand"); +var de_RotateSecretCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_RotateSecretCommand"); 
+var de_StopReplicationToReplicaCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_StopReplicationToReplicaCommand"); +var de_TagResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await (0, import_smithy_client.collectBody)(output.body, context); + const response = { + $metadata: deserializeMetadata(output) + }; + return response; +}, "de_TagResourceCommand"); +var de_UntagResourceCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + await (0, import_smithy_client.collectBody)(output.body, context); + const response = { + $metadata: deserializeMetadata(output) + }; + return response; +}, "de_UntagResourceCommand"); +var de_UpdateSecretCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateSecretCommand"); +var de_UpdateSecretVersionStageCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_UpdateSecretVersionStageCommand"); +var de_ValidateResourcePolicyCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core2.parseJsonBody)(output.body, context); + let contents = {}; + contents = (0, import_smithy_client._json)(data); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_ValidateResourcePolicyCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "DecryptionFailure": + case "com.amazonaws.secretsmanager#DecryptionFailure": + throw await de_DecryptionFailureRes(parsedOutput, context); + case "InternalServiceError": + case "com.amazonaws.secretsmanager#InternalServiceError": + throw await de_InternalServiceErrorRes(parsedOutput, context); + case "InvalidNextTokenException": + case "com.amazonaws.secretsmanager#InvalidNextTokenException": + throw await de_InvalidNextTokenExceptionRes(parsedOutput, context); + case "InvalidParameterException": + case "com.amazonaws.secretsmanager#InvalidParameterException": + throw await de_InvalidParameterExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case 
"com.amazonaws.secretsmanager#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.secretsmanager#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "EncryptionFailure": + case "com.amazonaws.secretsmanager#EncryptionFailure": + throw await de_EncryptionFailureRes(parsedOutput, context); + case "LimitExceededException": + case "com.amazonaws.secretsmanager#LimitExceededException": + throw await de_LimitExceededExceptionRes(parsedOutput, context); + case "MalformedPolicyDocumentException": + case "com.amazonaws.secretsmanager#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PreconditionNotMetException": + case "com.amazonaws.secretsmanager#PreconditionNotMetException": + throw await de_PreconditionNotMetExceptionRes(parsedOutput, context); + case "ResourceExistsException": + case "com.amazonaws.secretsmanager#ResourceExistsException": + throw await de_ResourceExistsExceptionRes(parsedOutput, context); + case "PublicPolicyException": + case "com.amazonaws.secretsmanager#PublicPolicyException": + throw await de_PublicPolicyExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var de_DecryptionFailureRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new DecryptionFailure({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_DecryptionFailureRes"); +var de_EncryptionFailureRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new EncryptionFailure({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_EncryptionFailureRes"); +var de_InternalServiceErrorRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InternalServiceError({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InternalServiceErrorRes"); +var de_InvalidNextTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidNextTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidNextTokenExceptionRes"); +var de_InvalidParameterExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidParameterException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, 
"de_InvalidParameterExceptionRes"); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidRequestExceptionRes"); +var de_LimitExceededExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new LimitExceededException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_LimitExceededExceptionRes"); +var de_MalformedPolicyDocumentExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_MalformedPolicyDocumentExceptionRes"); +var de_PreconditionNotMetExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new PreconditionNotMetException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_PreconditionNotMetExceptionRes"); +var de_PublicPolicyExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new PublicPolicyException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_PublicPolicyExceptionRes"); +var de_ResourceExistsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ResourceExistsException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ResourceExistsExceptionRes"); +var de_ResourceNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = (0, import_smithy_client._json)(body); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ResourceNotFoundExceptionRes"); +var se_CreateSecretRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + AddReplicaRegions: import_smithy_client._json, + ClientRequestToken: [true, (_) => _ ?? 
(0, import_uuid.v4)()], + Description: [], + ForceOverwriteReplicaSecret: [], + KmsKeyId: [], + Name: [], + SecretBinary: context.base64Encoder, + SecretString: [], + Tags: import_smithy_client._json + }); +}, "se_CreateSecretRequest"); +var se_PutSecretValueRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientRequestToken: [true, (_) => _ ?? (0, import_uuid.v4)()], + RotationToken: [], + SecretBinary: context.base64Encoder, + SecretId: [], + SecretString: [], + VersionStages: import_smithy_client._json + }); +}, "se_PutSecretValueRequest"); +var se_RotateSecretRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientRequestToken: [true, (_) => _ ?? (0, import_uuid.v4)()], + RotateImmediately: [], + RotationLambdaARN: [], + RotationRules: import_smithy_client._json, + SecretId: [] + }); +}, "se_RotateSecretRequest"); +var se_UpdateSecretRequest = /* @__PURE__ */ __name((input, context) => { + return (0, import_smithy_client.take)(input, { + ClientRequestToken: [true, (_) => _ ?? (0, import_uuid.v4)()], + Description: [], + KmsKeyId: [], + SecretBinary: context.base64Encoder, + SecretId: [], + SecretString: [] + }); +}, "se_UpdateSecretRequest"); +var de_BatchGetSecretValueResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + Errors: import_smithy_client._json, + NextToken: import_smithy_client.expectString, + SecretValues: (_) => de_SecretValuesType(_, context) + }); +}, "de_BatchGetSecretValueResponse"); +var de_CreateSecretResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ARN: import_smithy_client.expectString, + Name: import_smithy_client.expectString, + ReplicationStatus: (_) => de_ReplicationStatusListType(_, context), + VersionId: import_smithy_client.expectString + }); +}, "de_CreateSecretResponse"); +var de_DeleteSecretResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ARN: import_smithy_client.expectString, + DeletionDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + Name: import_smithy_client.expectString + }); +}, "de_DeleteSecretResponse"); +var de_DescribeSecretResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ARN: import_smithy_client.expectString, + CreatedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + DeletedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + Description: import_smithy_client.expectString, + KmsKeyId: import_smithy_client.expectString, + LastAccessedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + LastChangedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + LastRotatedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + Name: import_smithy_client.expectString, + NextRotationDate: (_) => (0, import_smithy_client.expectNonNull)((0, 
import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + OwningService: import_smithy_client.expectString, + PrimaryRegion: import_smithy_client.expectString, + ReplicationStatus: (_) => de_ReplicationStatusListType(_, context), + RotationEnabled: import_smithy_client.expectBoolean, + RotationLambdaARN: import_smithy_client.expectString, + RotationRules: import_smithy_client._json, + Tags: import_smithy_client._json, + VersionIdsToStages: import_smithy_client._json + }); +}, "de_DescribeSecretResponse"); +var de_GetSecretValueResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ARN: import_smithy_client.expectString, + CreatedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + Name: import_smithy_client.expectString, + SecretBinary: context.base64Decoder, + SecretString: import_smithy_client.expectString, + VersionId: import_smithy_client.expectString, + VersionStages: import_smithy_client._json + }); +}, "de_GetSecretValueResponse"); +var de_ListSecretsResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + NextToken: import_smithy_client.expectString, + SecretList: (_) => de_SecretListType(_, context) + }); +}, "de_ListSecretsResponse"); +var de_ListSecretVersionIdsResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ARN: import_smithy_client.expectString, + Name: import_smithy_client.expectString, + NextToken: import_smithy_client.expectString, + Versions: (_) => de_SecretVersionsListType(_, context) + }); +}, "de_ListSecretVersionIdsResponse"); +var de_RemoveRegionsFromReplicationResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ARN: import_smithy_client.expectString, + ReplicationStatus: (_) => de_ReplicationStatusListType(_, context) + }); +}, "de_RemoveRegionsFromReplicationResponse"); +var de_ReplicateSecretToRegionsResponse = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ARN: import_smithy_client.expectString, + ReplicationStatus: (_) => de_ReplicationStatusListType(_, context) + }); +}, "de_ReplicateSecretToRegionsResponse"); +var de_ReplicationStatusListType = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_ReplicationStatusType(entry, context); + }); + return retVal; +}, "de_ReplicationStatusListType"); +var de_ReplicationStatusType = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + KmsKeyId: import_smithy_client.expectString, + LastAccessedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + Region: import_smithy_client.expectString, + Status: import_smithy_client.expectString, + StatusMessage: import_smithy_client.expectString + }); +}, "de_ReplicationStatusType"); +var de_SecretListEntry = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ARN: import_smithy_client.expectString, + CreatedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + DeletedDate: (_) => (0, import_smithy_client.expectNonNull)((0, 
import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + Description: import_smithy_client.expectString, + KmsKeyId: import_smithy_client.expectString, + LastAccessedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + LastChangedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + LastRotatedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + Name: import_smithy_client.expectString, + NextRotationDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + OwningService: import_smithy_client.expectString, + PrimaryRegion: import_smithy_client.expectString, + RotationEnabled: import_smithy_client.expectBoolean, + RotationLambdaARN: import_smithy_client.expectString, + RotationRules: import_smithy_client._json, + SecretVersionsToStages: import_smithy_client._json, + Tags: import_smithy_client._json + }); +}, "de_SecretListEntry"); +var de_SecretListType = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_SecretListEntry(entry, context); + }); + return retVal; +}, "de_SecretListType"); +var de_SecretValueEntry = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + ARN: import_smithy_client.expectString, + CreatedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + Name: import_smithy_client.expectString, + SecretBinary: context.base64Decoder, + SecretString: import_smithy_client.expectString, + VersionId: import_smithy_client.expectString, + VersionStages: import_smithy_client._json + }); +}, "de_SecretValueEntry"); +var de_SecretValuesType = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_SecretValueEntry(entry, context); + }); + return retVal; +}, "de_SecretValuesType"); +var de_SecretVersionsListEntry = /* @__PURE__ */ __name((output, context) => { + return (0, import_smithy_client.take)(output, { + CreatedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + KmsKeyIds: import_smithy_client._json, + LastAccessedDate: (_) => (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseEpochTimestamp)((0, import_smithy_client.expectNumber)(_))), + VersionId: import_smithy_client.expectString, + VersionStages: import_smithy_client._json + }); +}, "de_SecretVersionsListEntry"); +var de_SecretVersionsListType = /* @__PURE__ */ __name((output, context) => { + const retVal = (output || []).filter((e) => e != null).map((entry) => { + return de_SecretVersionsListEntry(entry, context); + }); + return retVal; +}, "de_SecretVersionsListType"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(SecretsManagerServiceException); +var buildHttpRpcRequest = /* @__PURE__ */ __name(async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + } + return new import_protocol_http.HttpRequest(contents); +}, "buildHttpRpcRequest"); +function sharedHeaders(operation) { + return { + "content-type": "application/x-amz-json-1.1", + "x-amz-target": `secretsmanager.${operation}` + }; +} +__name(sharedHeaders, "sharedHeaders"); + +// src/commands/BatchGetSecretValueCommand.ts +var _BatchGetSecretValueCommand = class _BatchGetSecretValueCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "BatchGetSecretValue", {}).n("SecretsManagerClient", "BatchGetSecretValueCommand").f(void 0, BatchGetSecretValueResponseFilterSensitiveLog).ser(se_BatchGetSecretValueCommand).de(de_BatchGetSecretValueCommand).build() { +}; +__name(_BatchGetSecretValueCommand, "BatchGetSecretValueCommand"); +var BatchGetSecretValueCommand = _BatchGetSecretValueCommand; + +// src/commands/CancelRotateSecretCommand.ts + + + +var _CancelRotateSecretCommand = class _CancelRotateSecretCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "CancelRotateSecret", {}).n("SecretsManagerClient", "CancelRotateSecretCommand").f(void 0, void 0).ser(se_CancelRotateSecretCommand).de(de_CancelRotateSecretCommand).build() { +}; +__name(_CancelRotateSecretCommand, "CancelRotateSecretCommand"); +var CancelRotateSecretCommand = _CancelRotateSecretCommand; + +// src/commands/CreateSecretCommand.ts + + + +var _CreateSecretCommand = class _CreateSecretCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "CreateSecret", {}).n("SecretsManagerClient", "CreateSecretCommand").f(CreateSecretRequestFilterSensitiveLog, void 0).ser(se_CreateSecretCommand).de(de_CreateSecretCommand).build() { +}; +__name(_CreateSecretCommand, "CreateSecretCommand"); +var CreateSecretCommand = _CreateSecretCommand; + +// src/commands/DeleteResourcePolicyCommand.ts + + + +var _DeleteResourcePolicyCommand = class _DeleteResourcePolicyCommand extends 
import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "DeleteResourcePolicy", {}).n("SecretsManagerClient", "DeleteResourcePolicyCommand").f(void 0, void 0).ser(se_DeleteResourcePolicyCommand).de(de_DeleteResourcePolicyCommand).build() { +}; +__name(_DeleteResourcePolicyCommand, "DeleteResourcePolicyCommand"); +var DeleteResourcePolicyCommand = _DeleteResourcePolicyCommand; + +// src/commands/DeleteSecretCommand.ts + + + +var _DeleteSecretCommand = class _DeleteSecretCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "DeleteSecret", {}).n("SecretsManagerClient", "DeleteSecretCommand").f(void 0, void 0).ser(se_DeleteSecretCommand).de(de_DeleteSecretCommand).build() { +}; +__name(_DeleteSecretCommand, "DeleteSecretCommand"); +var DeleteSecretCommand = _DeleteSecretCommand; + +// src/commands/DescribeSecretCommand.ts + + + +var _DescribeSecretCommand = class _DescribeSecretCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "DescribeSecret", {}).n("SecretsManagerClient", "DescribeSecretCommand").f(void 0, void 0).ser(se_DescribeSecretCommand).de(de_DescribeSecretCommand).build() { +}; +__name(_DescribeSecretCommand, "DescribeSecretCommand"); +var DescribeSecretCommand = _DescribeSecretCommand; + +// src/commands/GetRandomPasswordCommand.ts + + + +var _GetRandomPasswordCommand = class _GetRandomPasswordCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "GetRandomPassword", {}).n("SecretsManagerClient", "GetRandomPasswordCommand").f(void 0, GetRandomPasswordResponseFilterSensitiveLog).ser(se_GetRandomPasswordCommand).de(de_GetRandomPasswordCommand).build() { +}; +__name(_GetRandomPasswordCommand, "GetRandomPasswordCommand"); +var GetRandomPasswordCommand = _GetRandomPasswordCommand; + +// src/commands/GetResourcePolicyCommand.ts + + + +var _GetResourcePolicyCommand = class _GetResourcePolicyCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "GetResourcePolicy", {}).n("SecretsManagerClient", "GetResourcePolicyCommand").f(void 0, void 0).ser(se_GetResourcePolicyCommand).de(de_GetResourcePolicyCommand).build() { +}; +__name(_GetResourcePolicyCommand, 
"GetResourcePolicyCommand"); +var GetResourcePolicyCommand = _GetResourcePolicyCommand; + +// src/commands/GetSecretValueCommand.ts + + + +var _GetSecretValueCommand = class _GetSecretValueCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "GetSecretValue", {}).n("SecretsManagerClient", "GetSecretValueCommand").f(void 0, GetSecretValueResponseFilterSensitiveLog).ser(se_GetSecretValueCommand).de(de_GetSecretValueCommand).build() { +}; +__name(_GetSecretValueCommand, "GetSecretValueCommand"); +var GetSecretValueCommand = _GetSecretValueCommand; + +// src/commands/ListSecretsCommand.ts + + + +var _ListSecretsCommand = class _ListSecretsCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "ListSecrets", {}).n("SecretsManagerClient", "ListSecretsCommand").f(void 0, void 0).ser(se_ListSecretsCommand).de(de_ListSecretsCommand).build() { +}; +__name(_ListSecretsCommand, "ListSecretsCommand"); +var ListSecretsCommand = _ListSecretsCommand; + +// src/commands/ListSecretVersionIdsCommand.ts + + + +var _ListSecretVersionIdsCommand = class _ListSecretVersionIdsCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "ListSecretVersionIds", {}).n("SecretsManagerClient", "ListSecretVersionIdsCommand").f(void 0, void 0).ser(se_ListSecretVersionIdsCommand).de(de_ListSecretVersionIdsCommand).build() { +}; +__name(_ListSecretVersionIdsCommand, "ListSecretVersionIdsCommand"); +var ListSecretVersionIdsCommand = _ListSecretVersionIdsCommand; + +// src/commands/PutResourcePolicyCommand.ts + + + +var _PutResourcePolicyCommand = class _PutResourcePolicyCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "PutResourcePolicy", {}).n("SecretsManagerClient", "PutResourcePolicyCommand").f(void 0, void 0).ser(se_PutResourcePolicyCommand).de(de_PutResourcePolicyCommand).build() { +}; +__name(_PutResourcePolicyCommand, "PutResourcePolicyCommand"); +var PutResourcePolicyCommand = _PutResourcePolicyCommand; + +// src/commands/PutSecretValueCommand.ts + + + +var _PutSecretValueCommand = class _PutSecretValueCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "PutSecretValue", 
{}).n("SecretsManagerClient", "PutSecretValueCommand").f(PutSecretValueRequestFilterSensitiveLog, void 0).ser(se_PutSecretValueCommand).de(de_PutSecretValueCommand).build() { +}; +__name(_PutSecretValueCommand, "PutSecretValueCommand"); +var PutSecretValueCommand = _PutSecretValueCommand; + +// src/commands/RemoveRegionsFromReplicationCommand.ts + + + +var _RemoveRegionsFromReplicationCommand = class _RemoveRegionsFromReplicationCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "RemoveRegionsFromReplication", {}).n("SecretsManagerClient", "RemoveRegionsFromReplicationCommand").f(void 0, void 0).ser(se_RemoveRegionsFromReplicationCommand).de(de_RemoveRegionsFromReplicationCommand).build() { +}; +__name(_RemoveRegionsFromReplicationCommand, "RemoveRegionsFromReplicationCommand"); +var RemoveRegionsFromReplicationCommand = _RemoveRegionsFromReplicationCommand; + +// src/commands/ReplicateSecretToRegionsCommand.ts + + + +var _ReplicateSecretToRegionsCommand = class _ReplicateSecretToRegionsCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "ReplicateSecretToRegions", {}).n("SecretsManagerClient", "ReplicateSecretToRegionsCommand").f(void 0, void 0).ser(se_ReplicateSecretToRegionsCommand).de(de_ReplicateSecretToRegionsCommand).build() { +}; +__name(_ReplicateSecretToRegionsCommand, "ReplicateSecretToRegionsCommand"); +var ReplicateSecretToRegionsCommand = _ReplicateSecretToRegionsCommand; + +// src/commands/RestoreSecretCommand.ts + + + +var _RestoreSecretCommand = class _RestoreSecretCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "RestoreSecret", {}).n("SecretsManagerClient", "RestoreSecretCommand").f(void 0, void 0).ser(se_RestoreSecretCommand).de(de_RestoreSecretCommand).build() { +}; +__name(_RestoreSecretCommand, "RestoreSecretCommand"); +var RestoreSecretCommand = _RestoreSecretCommand; + +// src/commands/RotateSecretCommand.ts + + + +var _RotateSecretCommand = class _RotateSecretCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "RotateSecret", {}).n("SecretsManagerClient", "RotateSecretCommand").f(void 0, void 0).ser(se_RotateSecretCommand).de(de_RotateSecretCommand).build() { +}; +__name(_RotateSecretCommand, "RotateSecretCommand"); +var RotateSecretCommand = _RotateSecretCommand; + +// src/commands/StopReplicationToReplicaCommand.ts + + + +var _StopReplicationToReplicaCommand = class _StopReplicationToReplicaCommand 
extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "StopReplicationToReplica", {}).n("SecretsManagerClient", "StopReplicationToReplicaCommand").f(void 0, void 0).ser(se_StopReplicationToReplicaCommand).de(de_StopReplicationToReplicaCommand).build() { +}; +__name(_StopReplicationToReplicaCommand, "StopReplicationToReplicaCommand"); +var StopReplicationToReplicaCommand = _StopReplicationToReplicaCommand; + +// src/commands/TagResourceCommand.ts + + + +var _TagResourceCommand = class _TagResourceCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "TagResource", {}).n("SecretsManagerClient", "TagResourceCommand").f(void 0, void 0).ser(se_TagResourceCommand).de(de_TagResourceCommand).build() { +}; +__name(_TagResourceCommand, "TagResourceCommand"); +var TagResourceCommand = _TagResourceCommand; + +// src/commands/UntagResourceCommand.ts + + + +var _UntagResourceCommand = class _UntagResourceCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "UntagResource", {}).n("SecretsManagerClient", "UntagResourceCommand").f(void 0, void 0).ser(se_UntagResourceCommand).de(de_UntagResourceCommand).build() { +}; +__name(_UntagResourceCommand, "UntagResourceCommand"); +var UntagResourceCommand = _UntagResourceCommand; + +// src/commands/UpdateSecretCommand.ts + + + +var _UpdateSecretCommand = class _UpdateSecretCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "UpdateSecret", {}).n("SecretsManagerClient", "UpdateSecretCommand").f(UpdateSecretRequestFilterSensitiveLog, void 0).ser(se_UpdateSecretCommand).de(de_UpdateSecretCommand).build() { +}; +__name(_UpdateSecretCommand, "UpdateSecretCommand"); +var UpdateSecretCommand = _UpdateSecretCommand; + +// src/commands/UpdateSecretVersionStageCommand.ts + + + +var _UpdateSecretVersionStageCommand = class _UpdateSecretVersionStageCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "UpdateSecretVersionStage", {}).n("SecretsManagerClient", "UpdateSecretVersionStageCommand").f(void 0, void 0).ser(se_UpdateSecretVersionStageCommand).de(de_UpdateSecretVersionStageCommand).build() { +}; +__name(_UpdateSecretVersionStageCommand, 
"UpdateSecretVersionStageCommand"); +var UpdateSecretVersionStageCommand = _UpdateSecretVersionStageCommand; + +// src/commands/ValidateResourcePolicyCommand.ts + + + +var _ValidateResourcePolicyCommand = class _ValidateResourcePolicyCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("secretsmanager", "ValidateResourcePolicy", {}).n("SecretsManagerClient", "ValidateResourcePolicyCommand").f(void 0, void 0).ser(se_ValidateResourcePolicyCommand).de(de_ValidateResourcePolicyCommand).build() { +}; +__name(_ValidateResourcePolicyCommand, "ValidateResourcePolicyCommand"); +var ValidateResourcePolicyCommand = _ValidateResourcePolicyCommand; + +// src/SecretsManager.ts +var commands = { + BatchGetSecretValueCommand, + CancelRotateSecretCommand, + CreateSecretCommand, + DeleteResourcePolicyCommand, + DeleteSecretCommand, + DescribeSecretCommand, + GetRandomPasswordCommand, + GetResourcePolicyCommand, + GetSecretValueCommand, + ListSecretsCommand, + ListSecretVersionIdsCommand, + PutResourcePolicyCommand, + PutSecretValueCommand, + RemoveRegionsFromReplicationCommand, + ReplicateSecretToRegionsCommand, + RestoreSecretCommand, + RotateSecretCommand, + StopReplicationToReplicaCommand, + TagResourceCommand, + UntagResourceCommand, + UpdateSecretCommand, + UpdateSecretVersionStageCommand, + ValidateResourcePolicyCommand +}; +var _SecretsManager = class _SecretsManager extends SecretsManagerClient { +}; +__name(_SecretsManager, "SecretsManager"); +var SecretsManager = _SecretsManager; +(0, import_smithy_client.createAggregatedClient)(commands, SecretsManager); + +// src/pagination/BatchGetSecretValuePaginator.ts + +var paginateBatchGetSecretValue = (0, import_core.createPaginator)(SecretsManagerClient, BatchGetSecretValueCommand, "NextToken", "NextToken", "MaxResults"); + +// src/pagination/ListSecretVersionIdsPaginator.ts + +var paginateListSecretVersionIds = (0, import_core.createPaginator)(SecretsManagerClient, ListSecretVersionIdsCommand, "NextToken", "NextToken", "MaxResults"); + +// src/pagination/ListSecretsPaginator.ts + +var paginateListSecrets = (0, import_core.createPaginator)(SecretsManagerClient, ListSecretsCommand, "NextToken", "NextToken", "MaxResults"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 2580: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const tslib_1 = __nccwpck_require__(1860); +const package_json_1 = tslib_1.__importDefault(__nccwpck_require__(4456)); +const core_1 = __nccwpck_require__(8704); +const credential_provider_node_1 = __nccwpck_require__(5861); +const util_user_agent_node_1 = __nccwpck_require__(1656); +const config_resolver_1 = __nccwpck_require__(9316); +const hash_node_1 = __nccwpck_require__(5092); +const middleware_retry_1 = __nccwpck_require__(9618); +const node_config_provider_1 = __nccwpck_require__(5704); +const node_http_handler_1 = __nccwpck_require__(1279); +const util_body_length_node_1 = __nccwpck_require__(3638); +const util_retry_1 = __nccwpck_require__(5518); +const runtimeConfig_shared_1 = __nccwpck_require__(7605); +const smithy_client_1 = __nccwpck_require__(1411); +const 
util_defaults_mode_node_1 = __nccwpck_require__(5435); +const smithy_client_2 = __nccwpck_require__(1411); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? credential_provider_node_1.defaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 7605: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const core_1 = __nccwpck_require__(8704); +const smithy_client_1 = __nccwpck_require__(1411); +const url_parser_1 = __nccwpck_require__(4494); +const util_base64_1 = __nccwpck_require__(8385); +const util_utf8_1 = __nccwpck_require__(1577); +const httpAuthSchemeProvider_1 = __nccwpck_require__(6493); +const endpointResolver_1 = __nccwpck_require__(3267); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2017-10-17", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? 
httpAuthSchemeProvider_1.defaultSecretsManagerHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "Secrets Manager", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 5295: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveHttpAuthSchemeConfig = exports.defaultSSOOIDCHttpAuthSchemeProvider = exports.defaultSSOOIDCHttpAuthSchemeParametersProvider = void 0; +const core_1 = __nccwpck_require__(8704); +const util_middleware_1 = __nccwpck_require__(6324); +const defaultSSOOIDCHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSSOOIDCHttpAuthSchemeParametersProvider = defaultSSOOIDCHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sso-oauth", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSSOOIDCHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "CreateToken": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "RegisterClient": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "StartDeviceAuthorization": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSSOOIDCHttpAuthSchemeProvider = defaultSSOOIDCHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return { + ...config_0, + }; +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; + + +/***/ }), + +/***/ 8569: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = __nccwpck_require__(3068); +const util_endpoints_2 = __nccwpck_require__(9674); +const ruleset_1 = __nccwpck_require__(8722); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; 
+exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; + + +/***/ }), + +/***/ 8722: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ruleSet = void 0; +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = "tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://oidc.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://oidc-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://oidc.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://oidc.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; + + +/***/ }), + +/***/ 9260: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, 
key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AccessDeniedException: () => AccessDeniedException, + AuthorizationPendingException: () => AuthorizationPendingException, + CreateTokenCommand: () => CreateTokenCommand, + CreateTokenRequestFilterSensitiveLog: () => CreateTokenRequestFilterSensitiveLog, + CreateTokenResponseFilterSensitiveLog: () => CreateTokenResponseFilterSensitiveLog, + CreateTokenWithIAMCommand: () => CreateTokenWithIAMCommand, + CreateTokenWithIAMRequestFilterSensitiveLog: () => CreateTokenWithIAMRequestFilterSensitiveLog, + CreateTokenWithIAMResponseFilterSensitiveLog: () => CreateTokenWithIAMResponseFilterSensitiveLog, + ExpiredTokenException: () => ExpiredTokenException, + InternalServerException: () => InternalServerException, + InvalidClientException: () => InvalidClientException, + InvalidClientMetadataException: () => InvalidClientMetadataException, + InvalidGrantException: () => InvalidGrantException, + InvalidRedirectUriException: () => InvalidRedirectUriException, + InvalidRequestException: () => InvalidRequestException, + InvalidRequestRegionException: () => InvalidRequestRegionException, + InvalidScopeException: () => InvalidScopeException, + RegisterClientCommand: () => RegisterClientCommand, + RegisterClientResponseFilterSensitiveLog: () => RegisterClientResponseFilterSensitiveLog, + SSOOIDC: () => SSOOIDC, + SSOOIDCClient: () => SSOOIDCClient, + SSOOIDCServiceException: () => SSOOIDCServiceException, + SlowDownException: () => SlowDownException, + StartDeviceAuthorizationCommand: () => StartDeviceAuthorizationCommand, + StartDeviceAuthorizationRequestFilterSensitiveLog: () => StartDeviceAuthorizationRequestFilterSensitiveLog, + UnauthorizedClientException: () => UnauthorizedClientException, + UnsupportedGrantTypeException: () => UnsupportedGrantTypeException, + __Client: () => import_smithy_client.Client +}); +module.exports = __toCommonJS(src_exports); + +// src/SSOOIDCClient.ts +var import_middleware_host_header = __nccwpck_require__(2590); +var import_middleware_logger = __nccwpck_require__(5242); +var import_middleware_recursion_detection = __nccwpck_require__(1568); +var import_middleware_user_agent = __nccwpck_require__(2959); +var import_config_resolver = __nccwpck_require__(9316); +var import_core = __nccwpck_require__(402); +var import_middleware_content_length = __nccwpck_require__(7212); +var import_middleware_endpoint = __nccwpck_require__(99); +var import_middleware_retry = __nccwpck_require__(9618); + +var import_httpAuthSchemeProvider = __nccwpck_require__(5295); + +// src/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return { + ...options, + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + defaultSigningName: "sso-oauth" + }; +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/SSOOIDCClient.ts +var import_runtimeConfig = __nccwpck_require__(7918); + +// src/runtimeExtensions.ts +var import_region_config_resolver = __nccwpck_require__(6463); +var import_protocol_http = __nccwpck_require__(2356); +var import_smithy_client = __nccwpck_require__(1411); + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var asPartial = /* @__PURE__ */ __name((t) => t, "asPartial"); +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = { + ...asPartial((0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig)), + ...asPartial((0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig)), + ...asPartial((0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig)), + ...asPartial(getHttpAuthExtensionConfiguration(runtimeConfig)) + }; + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return { + ...runtimeConfig, + ...(0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + ...(0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + ...(0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + ...resolveHttpAuthRuntimeConfig(extensionConfiguration) + }; +}, "resolveRuntimeExtensions"); + +// src/SSOOIDCClient.ts +var _SSOOIDCClient = class _SSOOIDCClient extends import_smithy_client.Client { + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, 
import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, (configuration == null ? void 0 : configuration.extensions) || []); + super(_config_8); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSSOOIDCHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }) + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy() { + super.destroy(); + } +}; +__name(_SSOOIDCClient, "SSOOIDCClient"); +var SSOOIDCClient = _SSOOIDCClient; + +// src/SSOOIDC.ts + + +// src/commands/CreateTokenCommand.ts + +var import_middleware_serde = __nccwpck_require__(3255); + + +// src/models/models_0.ts + + +// src/models/SSOOIDCServiceException.ts + +var _SSOOIDCServiceException = class _SSOOIDCServiceException extends import_smithy_client.ServiceException { + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOOIDCServiceException.prototype); + } +}; +__name(_SSOOIDCServiceException, "SSOOIDCServiceException"); +var SSOOIDCServiceException = _SSOOIDCServiceException; + +// src/models/models_0.ts +var _AccessDeniedException = class _AccessDeniedException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "AccessDeniedException", + $fault: "client", + ...opts + }); + this.name = "AccessDeniedException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _AccessDeniedException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_AccessDeniedException, "AccessDeniedException"); +var AccessDeniedException = _AccessDeniedException; +var _AuthorizationPendingException = class _AuthorizationPendingException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "AuthorizationPendingException", + $fault: "client", + ...opts + }); + this.name = "AuthorizationPendingException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _AuthorizationPendingException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_AuthorizationPendingException, 
"AuthorizationPendingException"); +var AuthorizationPendingException = _AuthorizationPendingException; +var _ExpiredTokenException = class _ExpiredTokenException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + this.name = "ExpiredTokenException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_ExpiredTokenException, "ExpiredTokenException"); +var ExpiredTokenException = _ExpiredTokenException; +var _InternalServerException = class _InternalServerException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InternalServerException", + $fault: "server", + ...opts + }); + this.name = "InternalServerException"; + this.$fault = "server"; + Object.setPrototypeOf(this, _InternalServerException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_InternalServerException, "InternalServerException"); +var InternalServerException = _InternalServerException; +var _InvalidClientException = class _InvalidClientException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidClientException", + $fault: "client", + ...opts + }); + this.name = "InvalidClientException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_InvalidClientException, "InvalidClientException"); +var InvalidClientException = _InvalidClientException; +var _InvalidGrantException = class _InvalidGrantException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidGrantException", + $fault: "client", + ...opts + }); + this.name = "InvalidGrantException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidGrantException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_InvalidGrantException, "InvalidGrantException"); +var InvalidGrantException = _InvalidGrantException; +var _InvalidRequestException = class _InvalidRequestException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + this.name = "InvalidRequestException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_InvalidRequestException, "InvalidRequestException"); +var InvalidRequestException = _InvalidRequestException; +var _InvalidScopeException = class _InvalidScopeException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidScopeException", + $fault: "client", + ...opts + }); + this.name = "InvalidScopeException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidScopeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_InvalidScopeException, "InvalidScopeException"); +var InvalidScopeException = _InvalidScopeException; +var _SlowDownException = class _SlowDownException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) 
{ + super({ + name: "SlowDownException", + $fault: "client", + ...opts + }); + this.name = "SlowDownException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _SlowDownException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_SlowDownException, "SlowDownException"); +var SlowDownException = _SlowDownException; +var _UnauthorizedClientException = class _UnauthorizedClientException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnauthorizedClientException", + $fault: "client", + ...opts + }); + this.name = "UnauthorizedClientException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _UnauthorizedClientException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_UnauthorizedClientException, "UnauthorizedClientException"); +var UnauthorizedClientException = _UnauthorizedClientException; +var _UnsupportedGrantTypeException = class _UnsupportedGrantTypeException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnsupportedGrantTypeException", + $fault: "client", + ...opts + }); + this.name = "UnsupportedGrantTypeException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _UnsupportedGrantTypeException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_UnsupportedGrantTypeException, "UnsupportedGrantTypeException"); +var UnsupportedGrantTypeException = _UnsupportedGrantTypeException; +var _InvalidRequestRegionException = class _InvalidRequestRegionException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestRegionException", + $fault: "client", + ...opts + }); + this.name = "InvalidRequestRegionException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidRequestRegionException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + this.endpoint = opts.endpoint; + this.region = opts.region; + } +}; +__name(_InvalidRequestRegionException, "InvalidRequestRegionException"); +var InvalidRequestRegionException = _InvalidRequestRegionException; +var _InvalidClientMetadataException = class _InvalidClientMetadataException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidClientMetadataException", + $fault: "client", + ...opts + }); + this.name = "InvalidClientMetadataException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidClientMetadataException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_InvalidClientMetadataException, "InvalidClientMetadataException"); +var InvalidClientMetadataException = _InvalidClientMetadataException; +var _InvalidRedirectUriException = class _InvalidRedirectUriException extends SSOOIDCServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRedirectUriException", + $fault: "client", + ...opts + }); + this.name = "InvalidRedirectUriException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidRedirectUriException.prototype); + this.error = opts.error; + this.error_description = opts.error_description; + } +}; +__name(_InvalidRedirectUriException, "InvalidRedirectUriException"); +var InvalidRedirectUriException = _InvalidRedirectUriException; +var CreateTokenRequestFilterSensitiveLog = /* 
@__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.clientSecret && { clientSecret: import_smithy_client.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client.SENSITIVE_STRING }, + ...obj.codeVerifier && { codeVerifier: import_smithy_client.SENSITIVE_STRING } +}), "CreateTokenRequestFilterSensitiveLog"); +var CreateTokenResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client.SENSITIVE_STRING }, + ...obj.idToken && { idToken: import_smithy_client.SENSITIVE_STRING } +}), "CreateTokenResponseFilterSensitiveLog"); +var CreateTokenWithIAMRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.refreshToken && { refreshToken: import_smithy_client.SENSITIVE_STRING }, + ...obj.assertion && { assertion: import_smithy_client.SENSITIVE_STRING }, + ...obj.subjectToken && { subjectToken: import_smithy_client.SENSITIVE_STRING }, + ...obj.codeVerifier && { codeVerifier: import_smithy_client.SENSITIVE_STRING } +}), "CreateTokenWithIAMRequestFilterSensitiveLog"); +var CreateTokenWithIAMResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING }, + ...obj.refreshToken && { refreshToken: import_smithy_client.SENSITIVE_STRING }, + ...obj.idToken && { idToken: import_smithy_client.SENSITIVE_STRING } +}), "CreateTokenWithIAMResponseFilterSensitiveLog"); +var RegisterClientResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.clientSecret && { clientSecret: import_smithy_client.SENSITIVE_STRING } +}), "RegisterClientResponseFilterSensitiveLog"); +var StartDeviceAuthorizationRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.clientSecret && { clientSecret: import_smithy_client.SENSITIVE_STRING } +}), "StartDeviceAuthorizationRequestFilterSensitiveLog"); + +// src/protocols/Aws_restJson1.ts +var import_core2 = __nccwpck_require__(8704); + + +var se_CreateTokenCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = { + "content-type": "application/json" + }; + b.bp("/token"); + let body; + body = JSON.stringify( + (0, import_smithy_client.take)(input, { + clientId: [], + clientSecret: [], + code: [], + codeVerifier: [], + deviceCode: [], + grantType: [], + redirectUri: [], + refreshToken: [], + scope: (_) => (0, import_smithy_client._json)(_) + }) + ); + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_CreateTokenCommand"); +var se_CreateTokenWithIAMCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = { + "content-type": "application/json" + }; + b.bp("/token"); + const query = (0, import_smithy_client.map)({ + [_ai]: [, "t"] + }); + let body; + body = JSON.stringify( + (0, import_smithy_client.take)(input, { + assertion: [], + clientId: [], + code: [], + codeVerifier: [], + grantType: [], + redirectUri: [], + refreshToken: [], + requestedTokenType: [], + scope: (_) => (0, import_smithy_client._json)(_), + subjectToken: [], + subjectTokenType: [] + }) + ); + b.m("POST").h(headers).q(query).b(body); + return b.build(); +}, "se_CreateTokenWithIAMCommand"); +var se_RegisterClientCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, 
import_core.requestBuilder)(input, context); + const headers = { + "content-type": "application/json" + }; + b.bp("/client/register"); + let body; + body = JSON.stringify( + (0, import_smithy_client.take)(input, { + clientName: [], + clientType: [], + entitledApplicationArn: [], + grantTypes: (_) => (0, import_smithy_client._json)(_), + issuerUrl: [], + redirectUris: (_) => (0, import_smithy_client._json)(_), + scopes: (_) => (0, import_smithy_client._json)(_) + }) + ); + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_RegisterClientCommand"); +var se_StartDeviceAuthorizationCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = { + "content-type": "application/json" + }; + b.bp("/device_authorization"); + let body; + body = JSON.stringify( + (0, import_smithy_client.take)(input, { + clientId: [], + clientSecret: [], + startUrl: [] + }) + ); + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_StartDeviceAuthorizationCommand"); +var de_CreateTokenCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + accessToken: import_smithy_client.expectString, + expiresIn: import_smithy_client.expectInt32, + idToken: import_smithy_client.expectString, + refreshToken: import_smithy_client.expectString, + tokenType: import_smithy_client.expectString + }); + Object.assign(contents, doc); + return contents; +}, "de_CreateTokenCommand"); +var de_CreateTokenWithIAMCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + accessToken: import_smithy_client.expectString, + expiresIn: import_smithy_client.expectInt32, + idToken: import_smithy_client.expectString, + issuedTokenType: import_smithy_client.expectString, + refreshToken: import_smithy_client.expectString, + scope: import_smithy_client._json, + tokenType: import_smithy_client.expectString + }); + Object.assign(contents, doc); + return contents; +}, "de_CreateTokenWithIAMCommand"); +var de_RegisterClientCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + authorizationEndpoint: import_smithy_client.expectString, + clientId: import_smithy_client.expectString, + clientIdIssuedAt: import_smithy_client.expectLong, + clientSecret: import_smithy_client.expectString, + 
clientSecretExpiresAt: import_smithy_client.expectLong, + tokenEndpoint: import_smithy_client.expectString + }); + Object.assign(contents, doc); + return contents; +}, "de_RegisterClientCommand"); +var de_StartDeviceAuthorizationCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + deviceCode: import_smithy_client.expectString, + expiresIn: import_smithy_client.expectInt32, + interval: import_smithy_client.expectInt32, + userCode: import_smithy_client.expectString, + verificationUri: import_smithy_client.expectString, + verificationUriComplete: import_smithy_client.expectString + }); + Object.assign(contents, doc); + return contents; +}, "de_StartDeviceAuthorizationCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.ssooidc#AccessDeniedException": + throw await de_AccessDeniedExceptionRes(parsedOutput, context); + case "AuthorizationPendingException": + case "com.amazonaws.ssooidc#AuthorizationPendingException": + throw await de_AuthorizationPendingExceptionRes(parsedOutput, context); + case "ExpiredTokenException": + case "com.amazonaws.ssooidc#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "InternalServerException": + case "com.amazonaws.ssooidc#InternalServerException": + throw await de_InternalServerExceptionRes(parsedOutput, context); + case "InvalidClientException": + case "com.amazonaws.ssooidc#InvalidClientException": + throw await de_InvalidClientExceptionRes(parsedOutput, context); + case "InvalidGrantException": + case "com.amazonaws.ssooidc#InvalidGrantException": + throw await de_InvalidGrantExceptionRes(parsedOutput, context); + case "InvalidRequestException": + case "com.amazonaws.ssooidc#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "InvalidScopeException": + case "com.amazonaws.ssooidc#InvalidScopeException": + throw await de_InvalidScopeExceptionRes(parsedOutput, context); + case "SlowDownException": + case "com.amazonaws.ssooidc#SlowDownException": + throw await de_SlowDownExceptionRes(parsedOutput, context); + case "UnauthorizedClientException": + case "com.amazonaws.ssooidc#UnauthorizedClientException": + throw await de_UnauthorizedClientExceptionRes(parsedOutput, context); + case "UnsupportedGrantTypeException": + case "com.amazonaws.ssooidc#UnsupportedGrantTypeException": + throw await de_UnsupportedGrantTypeExceptionRes(parsedOutput, context); + case "InvalidRequestRegionException": + case "com.amazonaws.ssooidc#InvalidRequestRegionException": + throw await de_InvalidRequestRegionExceptionRes(parsedOutput, context); + case "InvalidClientMetadataException": + case "com.amazonaws.ssooidc#InvalidClientMetadataException": + throw await de_InvalidClientMetadataExceptionRes(parsedOutput, context); + case 
"InvalidRedirectUriException": + case "com.amazonaws.ssooidc#InvalidRedirectUriException": + throw await de_InvalidRedirectUriExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(SSOOIDCServiceException); +var de_AccessDeniedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new AccessDeniedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_AccessDeniedExceptionRes"); +var de_AuthorizationPendingExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new AuthorizationPendingException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_AuthorizationPendingExceptionRes"); +var de_ExpiredTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_ExpiredTokenExceptionRes"); +var de_InternalServerExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InternalServerException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InternalServerExceptionRes"); +var de_InvalidClientExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidClientExceptionRes"); +var de_InvalidClientMetadataExceptionRes = 
/* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidClientMetadataException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidClientMetadataExceptionRes"); +var de_InvalidGrantExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidGrantException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidGrantExceptionRes"); +var de_InvalidRedirectUriExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRedirectUriException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRedirectUriExceptionRes"); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestExceptionRes"); +var de_InvalidRequestRegionExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + endpoint: import_smithy_client.expectString, + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString, + region: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestRegionException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestRegionExceptionRes"); +var de_InvalidScopeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + 
Object.assign(contents, doc); + const exception = new InvalidScopeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidScopeExceptionRes"); +var de_SlowDownExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new SlowDownException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_SlowDownExceptionRes"); +var de_UnauthorizedClientExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedClientException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnauthorizedClientExceptionRes"); +var de_UnsupportedGrantTypeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + error: import_smithy_client.expectString, + error_description: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new UnsupportedGrantTypeException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnsupportedGrantTypeExceptionRes"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var _ai = "aws_iam"; + +// src/commands/CreateTokenCommand.ts +var _CreateTokenCommand = class _CreateTokenCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSSOOIDCService", "CreateToken", {}).n("SSOOIDCClient", "CreateTokenCommand").f(CreateTokenRequestFilterSensitiveLog, CreateTokenResponseFilterSensitiveLog).ser(se_CreateTokenCommand).de(de_CreateTokenCommand).build() { +}; +__name(_CreateTokenCommand, "CreateTokenCommand"); +var CreateTokenCommand = _CreateTokenCommand; + +// src/commands/CreateTokenWithIAMCommand.ts + + + +var _CreateTokenWithIAMCommand = class _CreateTokenWithIAMCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSSOOIDCService", "CreateTokenWithIAM", {}).n("SSOOIDCClient", "CreateTokenWithIAMCommand").f(CreateTokenWithIAMRequestFilterSensitiveLog, CreateTokenWithIAMResponseFilterSensitiveLog).ser(se_CreateTokenWithIAMCommand).de(de_CreateTokenWithIAMCommand).build() { +}; +__name(_CreateTokenWithIAMCommand, "CreateTokenWithIAMCommand"); +var CreateTokenWithIAMCommand = _CreateTokenWithIAMCommand; + +// src/commands/RegisterClientCommand.ts + + + +var _RegisterClientCommand = class _RegisterClientCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSSOOIDCService", "RegisterClient", {}).n("SSOOIDCClient", "RegisterClientCommand").f(void 0, RegisterClientResponseFilterSensitiveLog).ser(se_RegisterClientCommand).de(de_RegisterClientCommand).build() { +}; +__name(_RegisterClientCommand, "RegisterClientCommand"); +var RegisterClientCommand = _RegisterClientCommand; + +// src/commands/StartDeviceAuthorizationCommand.ts + + + +var _StartDeviceAuthorizationCommand = class _StartDeviceAuthorizationCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSSOOIDCService", "StartDeviceAuthorization", {}).n("SSOOIDCClient", "StartDeviceAuthorizationCommand").f(StartDeviceAuthorizationRequestFilterSensitiveLog, void 0).ser(se_StartDeviceAuthorizationCommand).de(de_StartDeviceAuthorizationCommand).build() { +}; +__name(_StartDeviceAuthorizationCommand, "StartDeviceAuthorizationCommand"); +var StartDeviceAuthorizationCommand = _StartDeviceAuthorizationCommand; + +// src/SSOOIDC.ts +var commands = { + CreateTokenCommand, + CreateTokenWithIAMCommand, + RegisterClientCommand, + StartDeviceAuthorizationCommand +}; +var _SSOOIDC = class 
_SSOOIDC extends SSOOIDCClient { +}; +__name(_SSOOIDC, "SSOOIDC"); +var SSOOIDC = _SSOOIDC; +(0, import_smithy_client.createAggregatedClient)(commands, SSOOIDC); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 7918: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const tslib_1 = __nccwpck_require__(1860); +const package_json_1 = tslib_1.__importDefault(__nccwpck_require__(9412)); +const core_1 = __nccwpck_require__(8704); +const credential_provider_node_1 = __nccwpck_require__(5861); +const util_user_agent_node_1 = __nccwpck_require__(1656); +const config_resolver_1 = __nccwpck_require__(9316); +const hash_node_1 = __nccwpck_require__(5092); +const middleware_retry_1 = __nccwpck_require__(9618); +const node_config_provider_1 = __nccwpck_require__(5704); +const node_http_handler_1 = __nccwpck_require__(1279); +const util_body_length_node_1 = __nccwpck_require__(3638); +const util_retry_1 = __nccwpck_require__(5518); +const runtimeConfig_shared_1 = __nccwpck_require__(5047); +const smithy_client_1 = __nccwpck_require__(1411); +const util_defaults_mode_node_1 = __nccwpck_require__(5435); +const smithy_client_2 = __nccwpck_require__(1411); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? credential_provider_node_1.defaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? 
(0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 5047: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const core_1 = __nccwpck_require__(8704); +const core_2 = __nccwpck_require__(402); +const smithy_client_1 = __nccwpck_require__(1411); +const url_parser_1 = __nccwpck_require__(4494); +const util_base64_1 = __nccwpck_require__(8385); +const util_utf8_1 = __nccwpck_require__(1577); +const httpAuthSchemeProvider_1 = __nccwpck_require__(5295); +const endpointResolver_1 = __nccwpck_require__(8569); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSSOOIDCHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO OIDC", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 2041: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveHttpAuthSchemeConfig = exports.defaultSSOHttpAuthSchemeProvider = exports.defaultSSOHttpAuthSchemeParametersProvider = void 0; +const core_1 = __nccwpck_require__(8704); +const util_middleware_1 = __nccwpck_require__(6324); +const defaultSSOHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSSOHttpAuthSchemeParametersProvider = defaultSSOHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "awsssoportal", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSSOHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "GetRoleCredentials": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccountRoles": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "ListAccounts": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "Logout": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSSOHttpAuthSchemeProvider = defaultSSOHttpAuthSchemeProvider; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, core_1.resolveAwsSdkSigV4Config)(config); + return { + ...config_0, + }; +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; + + +/***/ }), + +/***/ 3903: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = __nccwpck_require__(3068); +const util_endpoints_2 = __nccwpck_require__(9674); +const ruleset_1 = __nccwpck_require__(1308); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; + + +/***/ }), + +/***/ 1308: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ruleSet = void 0; +const u = "required", v = "fn", w = "argv", x = "ref"; +const a = true, b = "isSet", c = "booleanEquals", d = "error", e = "endpoint", f = 
"tree", g = "PartitionResult", h = "getAttr", i = { [u]: false, "type": "String" }, j = { [u]: true, "default": false, "type": "Boolean" }, k = { [x]: "Endpoint" }, l = { [v]: c, [w]: [{ [x]: "UseFIPS" }, true] }, m = { [v]: c, [w]: [{ [x]: "UseDualStack" }, true] }, n = {}, o = { [v]: h, [w]: [{ [x]: g }, "supportsFIPS"] }, p = { [x]: g }, q = { [v]: c, [w]: [true, { [v]: h, [w]: [p, "supportsDualStack"] }] }, r = [l], s = [m], t = [{ [x]: "Region" }]; +const _data = { version: "1.0", parameters: { Region: i, UseDualStack: j, UseFIPS: j, Endpoint: i }, rules: [{ conditions: [{ [v]: b, [w]: [k] }], rules: [{ conditions: r, error: "Invalid Configuration: FIPS and custom endpoint are not supported", type: d }, { conditions: s, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", type: d }, { endpoint: { url: k, properties: n, headers: n }, type: e }], type: f }, { conditions: [{ [v]: b, [w]: t }], rules: [{ conditions: [{ [v]: "aws.partition", [w]: t, assign: g }], rules: [{ conditions: [l, m], rules: [{ conditions: [{ [v]: c, [w]: [a, o] }, q], rules: [{ endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", type: d }], type: f }, { conditions: r, rules: [{ conditions: [{ [v]: c, [w]: [o, a] }], rules: [{ conditions: [{ [v]: "stringEquals", [w]: [{ [v]: h, [w]: [p, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://portal.sso.{Region}.amazonaws.com", properties: n, headers: n }, type: e }, { endpoint: { url: "https://portal.sso-fips.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "FIPS is enabled but this partition does not support FIPS", type: d }], type: f }, { conditions: s, rules: [{ conditions: [q], rules: [{ endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: n, headers: n }, type: e }], type: f }, { error: "DualStack is enabled but this partition does not support DualStack", type: d }], type: f }, { endpoint: { url: "https://portal.sso.{Region}.{PartitionResult#dnsSuffix}", properties: n, headers: n }, type: e }], type: f }], type: f }, { error: "Invalid Configuration: Missing Region", type: d }] }; +exports.ruleSet = _data; + + +/***/ }), + +/***/ 2054: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + GetRoleCredentialsCommand: () => GetRoleCredentialsCommand, + GetRoleCredentialsRequestFilterSensitiveLog: () => 
GetRoleCredentialsRequestFilterSensitiveLog, + GetRoleCredentialsResponseFilterSensitiveLog: () => GetRoleCredentialsResponseFilterSensitiveLog, + InvalidRequestException: () => InvalidRequestException, + ListAccountRolesCommand: () => ListAccountRolesCommand, + ListAccountRolesRequestFilterSensitiveLog: () => ListAccountRolesRequestFilterSensitiveLog, + ListAccountsCommand: () => ListAccountsCommand, + ListAccountsRequestFilterSensitiveLog: () => ListAccountsRequestFilterSensitiveLog, + LogoutCommand: () => LogoutCommand, + LogoutRequestFilterSensitiveLog: () => LogoutRequestFilterSensitiveLog, + ResourceNotFoundException: () => ResourceNotFoundException, + RoleCredentialsFilterSensitiveLog: () => RoleCredentialsFilterSensitiveLog, + SSO: () => SSO, + SSOClient: () => SSOClient, + SSOServiceException: () => SSOServiceException, + TooManyRequestsException: () => TooManyRequestsException, + UnauthorizedException: () => UnauthorizedException, + __Client: () => import_smithy_client.Client, + paginateListAccountRoles: () => paginateListAccountRoles, + paginateListAccounts: () => paginateListAccounts +}); +module.exports = __toCommonJS(src_exports); + +// src/SSOClient.ts +var import_middleware_host_header = __nccwpck_require__(2590); +var import_middleware_logger = __nccwpck_require__(5242); +var import_middleware_recursion_detection = __nccwpck_require__(1568); +var import_middleware_user_agent = __nccwpck_require__(2959); +var import_config_resolver = __nccwpck_require__(9316); +var import_core = __nccwpck_require__(402); +var import_middleware_content_length = __nccwpck_require__(7212); +var import_middleware_endpoint = __nccwpck_require__(99); +var import_middleware_retry = __nccwpck_require__(9618); + +var import_httpAuthSchemeProvider = __nccwpck_require__(2041); + +// src/endpoint/EndpointParameters.ts +var resolveClientEndpointParameters = /* @__PURE__ */ __name((options) => { + return { + ...options, + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? 
false, + defaultSigningName: "awsssoportal" + }; +}, "resolveClientEndpointParameters"); +var commonParams = { + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" } +}; + +// src/SSOClient.ts +var import_runtimeConfig = __nccwpck_require__(2696); + +// src/runtimeExtensions.ts +var import_region_config_resolver = __nccwpck_require__(6463); +var import_protocol_http = __nccwpck_require__(2356); +var import_smithy_client = __nccwpck_require__(1411); + +// src/auth/httpAuthExtensionConfiguration.ts +var getHttpAuthExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const _httpAuthSchemes = runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + } + }; +}, "getHttpAuthExtensionConfiguration"); +var resolveHttpAuthRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials() + }; +}, "resolveHttpAuthRuntimeConfig"); + +// src/runtimeExtensions.ts +var asPartial = /* @__PURE__ */ __name((t) => t, "asPartial"); +var resolveRuntimeExtensions = /* @__PURE__ */ __name((runtimeConfig, extensions) => { + const extensionConfiguration = { + ...asPartial((0, import_region_config_resolver.getAwsRegionExtensionConfiguration)(runtimeConfig)), + ...asPartial((0, import_smithy_client.getDefaultExtensionConfiguration)(runtimeConfig)), + ...asPartial((0, import_protocol_http.getHttpHandlerExtensionConfiguration)(runtimeConfig)), + ...asPartial(getHttpAuthExtensionConfiguration(runtimeConfig)) + }; + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return { + ...runtimeConfig, + ...(0, import_region_config_resolver.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + ...(0, import_smithy_client.resolveDefaultRuntimeConfig)(extensionConfiguration), + ...(0, import_protocol_http.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + ...resolveHttpAuthRuntimeConfig(extensionConfiguration) + }; +}, "resolveRuntimeExtensions"); + +// src/SSOClient.ts +var _SSOClient = class _SSOClient extends import_smithy_client.Client { + constructor(...[configuration]) { + const _config_0 = (0, import_runtimeConfig.getRuntimeConfig)(configuration || {}); + const _config_1 = resolveClientEndpointParameters(_config_0); + const _config_2 = (0, import_middleware_user_agent.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, import_middleware_retry.resolveRetryConfig)(_config_2); + const _config_4 = (0, import_config_resolver.resolveRegionConfig)(_config_3); + const _config_5 = (0, 
import_middleware_host_header.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, import_middleware_endpoint.resolveEndpointConfig)(_config_5); + const _config_7 = (0, import_httpAuthSchemeProvider.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = resolveRuntimeExtensions(_config_7, (configuration == null ? void 0 : configuration.extensions) || []); + super(_config_8); + this.config = _config_8; + this.middlewareStack.use((0, import_middleware_user_agent.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_retry.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_content_length.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_host_header.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_logger.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, import_middleware_recursion_detection.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use( + (0, import_core.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: import_httpAuthSchemeProvider.defaultSSOHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new import_core.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials + }) + }) + ); + this.middlewareStack.use((0, import_core.getHttpSigningPlugin)(this.config)); + } + /** + * Destroy underlying resources, like sockets. It's usually not necessary to do this. + * However in Node.js, it's best to explicitly shut down the client's agent when it is no longer needed. + * Otherwise, sockets might stay open for quite a long time before the server terminates them. + */ + destroy() { + super.destroy(); + } +}; +__name(_SSOClient, "SSOClient"); +var SSOClient = _SSOClient; + +// src/SSO.ts + + +// src/commands/GetRoleCredentialsCommand.ts + +var import_middleware_serde = __nccwpck_require__(3255); + + +// src/models/models_0.ts + + +// src/models/SSOServiceException.ts + +var _SSOServiceException = class _SSOServiceException extends import_smithy_client.ServiceException { + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _SSOServiceException.prototype); + } +}; +__name(_SSOServiceException, "SSOServiceException"); +var SSOServiceException = _SSOServiceException; + +// src/models/models_0.ts +var _InvalidRequestException = class _InvalidRequestException extends SSOServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidRequestException", + $fault: "client", + ...opts + }); + this.name = "InvalidRequestException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidRequestException.prototype); + } +}; +__name(_InvalidRequestException, "InvalidRequestException"); +var InvalidRequestException = _InvalidRequestException; +var _ResourceNotFoundException = class _ResourceNotFoundException extends SSOServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "ResourceNotFoundException", + $fault: "client", + ...opts + }); + this.name = "ResourceNotFoundException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _ResourceNotFoundException.prototype); + } +}; +__name(_ResourceNotFoundException, "ResourceNotFoundException"); +var ResourceNotFoundException = _ResourceNotFoundException; +var _TooManyRequestsException = class _TooManyRequestsException extends SSOServiceException { + /** + * @internal + */ + constructor(opts) { 
+ super({ + name: "TooManyRequestsException", + $fault: "client", + ...opts + }); + this.name = "TooManyRequestsException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _TooManyRequestsException.prototype); + } +}; +__name(_TooManyRequestsException, "TooManyRequestsException"); +var TooManyRequestsException = _TooManyRequestsException; +var _UnauthorizedException = class _UnauthorizedException extends SSOServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "UnauthorizedException", + $fault: "client", + ...opts + }); + this.name = "UnauthorizedException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _UnauthorizedException.prototype); + } +}; +__name(_UnauthorizedException, "UnauthorizedException"); +var UnauthorizedException = _UnauthorizedException; +var GetRoleCredentialsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "GetRoleCredentialsRequestFilterSensitiveLog"); +var RoleCredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.secretAccessKey && { secretAccessKey: import_smithy_client.SENSITIVE_STRING }, + ...obj.sessionToken && { sessionToken: import_smithy_client.SENSITIVE_STRING } +}), "RoleCredentialsFilterSensitiveLog"); +var GetRoleCredentialsResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.roleCredentials && { roleCredentials: RoleCredentialsFilterSensitiveLog(obj.roleCredentials) } +}), "GetRoleCredentialsResponseFilterSensitiveLog"); +var ListAccountRolesRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "ListAccountRolesRequestFilterSensitiveLog"); +var ListAccountsRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "ListAccountsRequestFilterSensitiveLog"); +var LogoutRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.accessToken && { accessToken: import_smithy_client.SENSITIVE_STRING } +}), "LogoutRequestFilterSensitiveLog"); + +// src/protocols/Aws_restJson1.ts +var import_core2 = __nccwpck_require__(8704); + + +var se_GetRoleCredentialsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/federation/credentials"); + const query = (0, import_smithy_client.map)({ + [_rn]: [, (0, import_smithy_client.expectNonNull)(input[_rN], `roleName`)], + [_ai]: [, (0, import_smithy_client.expectNonNull)(input[_aI], `accountId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_GetRoleCredentialsCommand"); +var se_ListAccountRolesCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/assignment/roles"); + const query = (0, import_smithy_client.map)({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()], + [_ai]: [, (0, import_smithy_client.expectNonNull)(input[_aI], `accountId`)] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + 
return b.build(); +}, "se_ListAccountRolesCommand"); +var se_ListAccountsCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/assignment/accounts"); + const query = (0, import_smithy_client.map)({ + [_nt]: [, input[_nT]], + [_mr]: [() => input.maxResults !== void 0, () => input[_mR].toString()] + }); + let body; + b.m("GET").h(headers).q(query).b(body); + return b.build(); +}, "se_ListAccountsCommand"); +var se_LogoutCommand = /* @__PURE__ */ __name(async (input, context) => { + const b = (0, import_core.requestBuilder)(input, context); + const headers = (0, import_smithy_client.map)({}, import_smithy_client.isSerializableHeaderValue, { + [_xasbt]: input[_aT] + }); + b.bp("/logout"); + let body; + b.m("POST").h(headers).b(body); + return b.build(); +}, "se_LogoutCommand"); +var de_GetRoleCredentialsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + roleCredentials: import_smithy_client._json + }); + Object.assign(contents, doc); + return contents; +}, "de_GetRoleCredentialsCommand"); +var de_ListAccountRolesCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + nextToken: import_smithy_client.expectString, + roleList: import_smithy_client._json + }); + Object.assign(contents, doc); + return contents; +}, "de_ListAccountRolesCommand"); +var de_ListAccountsCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + const data = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.expectObject)(await (0, import_core2.parseJsonBody)(output.body, context)), "body"); + const doc = (0, import_smithy_client.take)(data, { + accountList: import_smithy_client._json, + nextToken: import_smithy_client.expectString + }); + Object.assign(contents, doc); + return contents; +}, "de_ListAccountsCommand"); +var de_LogoutCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return de_CommandError(output, context); + } + const contents = (0, import_smithy_client.map)({ + $metadata: deserializeMetadata(output) + }); + await (0, import_smithy_client.collectBody)(output.body, context); + return contents; +}, "de_LogoutCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, 
import_core2.parseJsonErrorBody)(output.body, context) + }; + const errorCode = (0, import_core2.loadRestJsonErrorCode)(output, parsedOutput.body); + switch (errorCode) { + case "InvalidRequestException": + case "com.amazonaws.sso#InvalidRequestException": + throw await de_InvalidRequestExceptionRes(parsedOutput, context); + case "ResourceNotFoundException": + case "com.amazonaws.sso#ResourceNotFoundException": + throw await de_ResourceNotFoundExceptionRes(parsedOutput, context); + case "TooManyRequestsException": + case "com.amazonaws.sso#TooManyRequestsException": + throw await de_TooManyRequestsExceptionRes(parsedOutput, context); + case "UnauthorizedException": + case "com.amazonaws.sso#UnauthorizedException": + throw await de_UnauthorizedExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody, + errorCode + }); + } +}, "de_CommandError"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(SSOServiceException); +var de_InvalidRequestExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new InvalidRequestException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_InvalidRequestExceptionRes"); +var de_ResourceNotFoundExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new ResourceNotFoundException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_ResourceNotFoundExceptionRes"); +var de_TooManyRequestsExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new TooManyRequestsException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_TooManyRequestsExceptionRes"); +var de_UnauthorizedExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const contents = (0, import_smithy_client.map)({}); + const data = parsedOutput.body; + const doc = (0, import_smithy_client.take)(data, { + message: import_smithy_client.expectString + }); + Object.assign(contents, doc); + const exception = new UnauthorizedException({ + $metadata: deserializeMetadata(parsedOutput), + ...contents + }); + return (0, import_smithy_client.decorateServiceException)(exception, parsedOutput.body); +}, "de_UnauthorizedExceptionRes"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? 
output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var _aI = "accountId"; +var _aT = "accessToken"; +var _ai = "account_id"; +var _mR = "maxResults"; +var _mr = "max_result"; +var _nT = "nextToken"; +var _nt = "next_token"; +var _rN = "roleName"; +var _rn = "role_name"; +var _xasbt = "x-amz-sso_bearer_token"; + +// src/commands/GetRoleCredentialsCommand.ts +var _GetRoleCredentialsCommand = class _GetRoleCredentialsCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "GetRoleCredentials", {}).n("SSOClient", "GetRoleCredentialsCommand").f(GetRoleCredentialsRequestFilterSensitiveLog, GetRoleCredentialsResponseFilterSensitiveLog).ser(se_GetRoleCredentialsCommand).de(de_GetRoleCredentialsCommand).build() { +}; +__name(_GetRoleCredentialsCommand, "GetRoleCredentialsCommand"); +var GetRoleCredentialsCommand = _GetRoleCredentialsCommand; + +// src/commands/ListAccountRolesCommand.ts + + + +var _ListAccountRolesCommand = class _ListAccountRolesCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "ListAccountRoles", {}).n("SSOClient", "ListAccountRolesCommand").f(ListAccountRolesRequestFilterSensitiveLog, void 0).ser(se_ListAccountRolesCommand).de(de_ListAccountRolesCommand).build() { +}; +__name(_ListAccountRolesCommand, "ListAccountRolesCommand"); +var ListAccountRolesCommand = _ListAccountRolesCommand; + +// src/commands/ListAccountsCommand.ts + + + +var _ListAccountsCommand = class _ListAccountsCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "ListAccounts", {}).n("SSOClient", "ListAccountsCommand").f(ListAccountsRequestFilterSensitiveLog, void 0).ser(se_ListAccountsCommand).de(de_ListAccountsCommand).build() { +}; +__name(_ListAccountsCommand, "ListAccountsCommand"); +var ListAccountsCommand = _ListAccountsCommand; + +// src/commands/LogoutCommand.ts + + + +var _LogoutCommand = class _LogoutCommand extends import_smithy_client.Command.classBuilder().ep(commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("SWBPortalService", "Logout", {}).n("SSOClient", "LogoutCommand").f(LogoutRequestFilterSensitiveLog, void 0).ser(se_LogoutCommand).de(de_LogoutCommand).build() { +}; +__name(_LogoutCommand, "LogoutCommand"); +var LogoutCommand = _LogoutCommand; + +// src/SSO.ts +var commands = { + GetRoleCredentialsCommand, + ListAccountRolesCommand, + ListAccountsCommand, + LogoutCommand +}; +var _SSO = class _SSO extends 
SSOClient { +}; +__name(_SSO, "SSO"); +var SSO = _SSO; +(0, import_smithy_client.createAggregatedClient)(commands, SSO); + +// src/pagination/ListAccountRolesPaginator.ts + +var paginateListAccountRoles = (0, import_core.createPaginator)(SSOClient, ListAccountRolesCommand, "nextToken", "nextToken", "maxResults"); + +// src/pagination/ListAccountsPaginator.ts + +var paginateListAccounts = (0, import_core.createPaginator)(SSOClient, ListAccountsCommand, "nextToken", "nextToken", "maxResults"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 2696: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const tslib_1 = __nccwpck_require__(1860); +const package_json_1 = tslib_1.__importDefault(__nccwpck_require__(5188)); +const core_1 = __nccwpck_require__(8704); +const util_user_agent_node_1 = __nccwpck_require__(1656); +const config_resolver_1 = __nccwpck_require__(9316); +const hash_node_1 = __nccwpck_require__(5092); +const middleware_retry_1 = __nccwpck_require__(9618); +const node_config_provider_1 = __nccwpck_require__(5704); +const node_http_handler_1 = __nccwpck_require__(1279); +const util_body_length_node_1 = __nccwpck_require__(3638); +const util_retry_1 = __nccwpck_require__(5518); +const runtimeConfig_shared_1 = __nccwpck_require__(8073); +const smithy_client_1 = __nccwpck_require__(1411); +const util_defaults_mode_node_1 = __nccwpck_require__(5435); +const smithy_client_2 = __nccwpck_require__(1411); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? + (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? 
(0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 8073: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const core_1 = __nccwpck_require__(8704); +const core_2 = __nccwpck_require__(402); +const smithy_client_1 = __nccwpck_require__(1411); +const url_parser_1 = __nccwpck_require__(4494); +const util_base64_1 = __nccwpck_require__(8385); +const util_utf8_1 = __nccwpck_require__(1577); +const httpAuthSchemeProvider_1 = __nccwpck_require__(2041); +const endpointResolver_1 = __nccwpck_require__(3903); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2019-06-10", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSSOHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "SSO", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? 
util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 1548: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.STSClient = exports.__Client = void 0; +const middleware_host_header_1 = __nccwpck_require__(2590); +const middleware_logger_1 = __nccwpck_require__(5242); +const middleware_recursion_detection_1 = __nccwpck_require__(1568); +const middleware_user_agent_1 = __nccwpck_require__(2959); +const config_resolver_1 = __nccwpck_require__(9316); +const core_1 = __nccwpck_require__(402); +const middleware_content_length_1 = __nccwpck_require__(7212); +const middleware_endpoint_1 = __nccwpck_require__(99); +const middleware_retry_1 = __nccwpck_require__(9618); +const smithy_client_1 = __nccwpck_require__(1411); +Object.defineProperty(exports, "__Client", ({ enumerable: true, get: function () { return smithy_client_1.Client; } })); +const httpAuthSchemeProvider_1 = __nccwpck_require__(9232); +const EndpointParameters_1 = __nccwpck_require__(2912); +const runtimeConfig_1 = __nccwpck_require__(2769); +const runtimeExtensions_1 = __nccwpck_require__(309); +class STSClient extends smithy_client_1.Client { + constructor(...[configuration]) { + const _config_0 = (0, runtimeConfig_1.getRuntimeConfig)(configuration || {}); + const _config_1 = (0, EndpointParameters_1.resolveClientEndpointParameters)(_config_0); + const _config_2 = (0, middleware_user_agent_1.resolveUserAgentConfig)(_config_1); + const _config_3 = (0, middleware_retry_1.resolveRetryConfig)(_config_2); + const _config_4 = (0, config_resolver_1.resolveRegionConfig)(_config_3); + const _config_5 = (0, middleware_host_header_1.resolveHostHeaderConfig)(_config_4); + const _config_6 = (0, middleware_endpoint_1.resolveEndpointConfig)(_config_5); + const _config_7 = (0, httpAuthSchemeProvider_1.resolveHttpAuthSchemeConfig)(_config_6); + const _config_8 = (0, runtimeExtensions_1.resolveRuntimeExtensions)(_config_7, configuration?.extensions || []); + super(_config_8); + this.config = _config_8; + this.middlewareStack.use((0, middleware_user_agent_1.getUserAgentPlugin)(this.config)); + this.middlewareStack.use((0, middleware_retry_1.getRetryPlugin)(this.config)); + this.middlewareStack.use((0, middleware_content_length_1.getContentLengthPlugin)(this.config)); + this.middlewareStack.use((0, middleware_host_header_1.getHostHeaderPlugin)(this.config)); + this.middlewareStack.use((0, middleware_logger_1.getLoggerPlugin)(this.config)); + this.middlewareStack.use((0, middleware_recursion_detection_1.getRecursionDetectionPlugin)(this.config)); + this.middlewareStack.use((0, core_1.getHttpAuthSchemeEndpointRuleSetPlugin)(this.config, { + httpAuthSchemeParametersProvider: httpAuthSchemeProvider_1.defaultSTSHttpAuthSchemeParametersProvider, + identityProviderConfigProvider: async (config) => new core_1.DefaultIdentityProviderConfig({ + "aws.auth#sigv4": config.credentials, + }), + })); + this.middlewareStack.use((0, core_1.getHttpSigningPlugin)(this.config)); + } + destroy() { + super.destroy(); + } +} +exports.STSClient = STSClient; + + +/***/ }), + +/***/ 9915: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveHttpAuthRuntimeConfig = exports.getHttpAuthExtensionConfiguration = void 0; +const getHttpAuthExtensionConfiguration = (runtimeConfig) => { + const _httpAuthSchemes = 
runtimeConfig.httpAuthSchemes; + let _httpAuthSchemeProvider = runtimeConfig.httpAuthSchemeProvider; + let _credentials = runtimeConfig.credentials; + return { + setHttpAuthScheme(httpAuthScheme) { + const index = _httpAuthSchemes.findIndex((scheme) => scheme.schemeId === httpAuthScheme.schemeId); + if (index === -1) { + _httpAuthSchemes.push(httpAuthScheme); + } + else { + _httpAuthSchemes.splice(index, 1, httpAuthScheme); + } + }, + httpAuthSchemes() { + return _httpAuthSchemes; + }, + setHttpAuthSchemeProvider(httpAuthSchemeProvider) { + _httpAuthSchemeProvider = httpAuthSchemeProvider; + }, + httpAuthSchemeProvider() { + return _httpAuthSchemeProvider; + }, + setCredentials(credentials) { + _credentials = credentials; + }, + credentials() { + return _credentials; + }, + }; +}; +exports.getHttpAuthExtensionConfiguration = getHttpAuthExtensionConfiguration; +const resolveHttpAuthRuntimeConfig = (config) => { + return { + httpAuthSchemes: config.httpAuthSchemes(), + httpAuthSchemeProvider: config.httpAuthSchemeProvider(), + credentials: config.credentials(), + }; +}; +exports.resolveHttpAuthRuntimeConfig = resolveHttpAuthRuntimeConfig; + + +/***/ }), + +/***/ 9232: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveHttpAuthSchemeConfig = exports.resolveStsAuthConfig = exports.defaultSTSHttpAuthSchemeProvider = exports.defaultSTSHttpAuthSchemeParametersProvider = void 0; +const core_1 = __nccwpck_require__(8704); +const util_middleware_1 = __nccwpck_require__(6324); +const STSClient_1 = __nccwpck_require__(1548); +const defaultSTSHttpAuthSchemeParametersProvider = async (config, context, input) => { + return { + operation: (0, util_middleware_1.getSmithyContext)(context).operation, + region: (await (0, util_middleware_1.normalizeProvider)(config.region)()) || + (() => { + throw new Error("expected `region` to be configured for `aws.auth#sigv4`"); + })(), + }; +}; +exports.defaultSTSHttpAuthSchemeParametersProvider = defaultSTSHttpAuthSchemeParametersProvider; +function createAwsAuthSigv4HttpAuthOption(authParameters) { + return { + schemeId: "aws.auth#sigv4", + signingProperties: { + name: "sts", + region: authParameters.region, + }, + propertiesExtractor: (config, context) => ({ + signingProperties: { + config, + context, + }, + }), + }; +} +function createSmithyApiNoAuthHttpAuthOption(authParameters) { + return { + schemeId: "smithy.api#noAuth", + }; +} +const defaultSTSHttpAuthSchemeProvider = (authParameters) => { + const options = []; + switch (authParameters.operation) { + case "AssumeRoleWithSAML": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + case "AssumeRoleWithWebIdentity": { + options.push(createSmithyApiNoAuthHttpAuthOption(authParameters)); + break; + } + default: { + options.push(createAwsAuthSigv4HttpAuthOption(authParameters)); + } + } + return options; +}; +exports.defaultSTSHttpAuthSchemeProvider = defaultSTSHttpAuthSchemeProvider; +const resolveStsAuthConfig = (input) => ({ + ...input, + stsClientCtor: STSClient_1.STSClient, +}); +exports.resolveStsAuthConfig = resolveStsAuthConfig; +const resolveHttpAuthSchemeConfig = (config) => { + const config_0 = (0, exports.resolveStsAuthConfig)(config); + const config_1 = (0, core_1.resolveAwsSdkSigV4Config)(config_0); + return { + ...config_1, + }; +}; +exports.resolveHttpAuthSchemeConfig = resolveHttpAuthSchemeConfig; + + +/***/ }), + +/***/ 2912: +/***/ 
((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.commonParams = exports.resolveClientEndpointParameters = void 0; +const resolveClientEndpointParameters = (options) => { + return { + ...options, + useDualstackEndpoint: options.useDualstackEndpoint ?? false, + useFipsEndpoint: options.useFipsEndpoint ?? false, + useGlobalEndpoint: options.useGlobalEndpoint ?? false, + defaultSigningName: "sts", + }; +}; +exports.resolveClientEndpointParameters = resolveClientEndpointParameters; +exports.commonParams = { + UseGlobalEndpoint: { type: "builtInParams", name: "useGlobalEndpoint" }, + UseFIPS: { type: "builtInParams", name: "useFipsEndpoint" }, + Endpoint: { type: "builtInParams", name: "endpoint" }, + Region: { type: "builtInParams", name: "region" }, + UseDualStack: { type: "builtInParams", name: "useDualstackEndpoint" }, +}; + + +/***/ }), + +/***/ 1030: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.defaultEndpointResolver = void 0; +const util_endpoints_1 = __nccwpck_require__(3068); +const util_endpoints_2 = __nccwpck_require__(9674); +const ruleset_1 = __nccwpck_require__(6199); +const cache = new util_endpoints_2.EndpointCache({ + size: 50, + params: ["Endpoint", "Region", "UseDualStack", "UseFIPS", "UseGlobalEndpoint"], +}); +const defaultEndpointResolver = (endpointParams, context = {}) => { + return cache.get(endpointParams, () => (0, util_endpoints_2.resolveEndpoint)(ruleset_1.ruleSet, { + endpointParams: endpointParams, + logger: context.logger, + })); +}; +exports.defaultEndpointResolver = defaultEndpointResolver; +util_endpoints_2.customEndpointFunctions.aws = util_endpoints_1.awsEndpointFunctions; + + +/***/ }), + +/***/ 6199: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ruleSet = void 0; +const F = "required", G = "type", H = "fn", I = "argv", J = "ref"; +const a = false, b = true, c = "booleanEquals", d = "stringEquals", e = "sigv4", f = "sts", g = "us-east-1", h = "endpoint", i = "https://sts.{Region}.{PartitionResult#dnsSuffix}", j = "tree", k = "error", l = "getAttr", m = { [F]: false, [G]: "String" }, n = { [F]: true, "default": false, [G]: "Boolean" }, o = { [J]: "Endpoint" }, p = { [H]: "isSet", [I]: [{ [J]: "Region" }] }, q = { [J]: "Region" }, r = { [H]: "aws.partition", [I]: [q], "assign": "PartitionResult" }, s = { [J]: "UseFIPS" }, t = { [J]: "UseDualStack" }, u = { "url": "https://sts.amazonaws.com", "properties": { "authSchemes": [{ "name": e, "signingName": f, "signingRegion": g }] }, "headers": {} }, v = {}, w = { "conditions": [{ [H]: d, [I]: [q, "aws-global"] }], [h]: u, [G]: h }, x = { [H]: c, [I]: [s, true] }, y = { [H]: c, [I]: [t, true] }, z = { [H]: l, [I]: [{ [J]: "PartitionResult" }, "supportsFIPS"] }, A = { [J]: "PartitionResult" }, B = { [H]: c, [I]: [true, { [H]: l, [I]: [A, "supportsDualStack"] }] }, C = [{ [H]: "isSet", [I]: [o] }], D = [x], E = [y]; +const _data = { version: "1.0", parameters: { Region: m, UseDualStack: n, UseFIPS: n, Endpoint: m, UseGlobalEndpoint: n }, rules: [{ conditions: [{ [H]: c, [I]: [{ [J]: "UseGlobalEndpoint" }, b] }, { [H]: "not", [I]: C }, p, r, { [H]: c, [I]: [s, a] }, { [H]: c, [I]: [t, a] }], rules: [{ conditions: [{ [H]: d, [I]: [q, "ap-northeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-south-1"] 
}], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "ap-southeast-2"] }], endpoint: u, [G]: h }, w, { conditions: [{ [H]: d, [I]: [q, "ca-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-central-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-north-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "eu-west-3"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "sa-east-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, g] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-east-2"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-1"] }], endpoint: u, [G]: h }, { conditions: [{ [H]: d, [I]: [q, "us-west-2"] }], endpoint: u, [G]: h }, { endpoint: { url: i, properties: { authSchemes: [{ name: e, signingName: f, signingRegion: "{Region}" }] }, headers: v }, [G]: h }], [G]: j }, { conditions: C, rules: [{ conditions: D, error: "Invalid Configuration: FIPS and custom endpoint are not supported", [G]: k }, { conditions: E, error: "Invalid Configuration: Dualstack and custom endpoint are not supported", [G]: k }, { endpoint: { url: o, properties: v, headers: v }, [G]: h }], [G]: j }, { conditions: [p], rules: [{ conditions: [r], rules: [{ conditions: [x, y], rules: [{ conditions: [{ [H]: c, [I]: [b, z] }, B], rules: [{ endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS and DualStack are enabled, but this partition does not support one or both", [G]: k }], [G]: j }, { conditions: D, rules: [{ conditions: [{ [H]: c, [I]: [z, b] }], rules: [{ conditions: [{ [H]: d, [I]: [{ [H]: l, [I]: [A, "name"] }, "aws-us-gov"] }], endpoint: { url: "https://sts.{Region}.amazonaws.com", properties: v, headers: v }, [G]: h }, { endpoint: { url: "https://sts-fips.{Region}.{PartitionResult#dnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "FIPS is enabled but this partition does not support FIPS", [G]: k }], [G]: j }, { conditions: E, rules: [{ conditions: [B], rules: [{ endpoint: { url: "https://sts.{Region}.{PartitionResult#dualStackDnsSuffix}", properties: v, headers: v }, [G]: h }], [G]: j }, { error: "DualStack is enabled but this partition does not support DualStack", [G]: k }], [G]: j }, w, { endpoint: { url: i, properties: v, headers: v }, [G]: h }], [G]: j }], [G]: j }, { error: "Invalid Configuration: Missing Region", [G]: k }] }; +exports.ruleSet = _data; + + +/***/ }), + +/***/ 1695: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => 
from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AssumeRoleCommand: () => AssumeRoleCommand, + AssumeRoleResponseFilterSensitiveLog: () => AssumeRoleResponseFilterSensitiveLog, + AssumeRoleWithSAMLCommand: () => AssumeRoleWithSAMLCommand, + AssumeRoleWithSAMLRequestFilterSensitiveLog: () => AssumeRoleWithSAMLRequestFilterSensitiveLog, + AssumeRoleWithSAMLResponseFilterSensitiveLog: () => AssumeRoleWithSAMLResponseFilterSensitiveLog, + AssumeRoleWithWebIdentityCommand: () => AssumeRoleWithWebIdentityCommand, + AssumeRoleWithWebIdentityRequestFilterSensitiveLog: () => AssumeRoleWithWebIdentityRequestFilterSensitiveLog, + AssumeRoleWithWebIdentityResponseFilterSensitiveLog: () => AssumeRoleWithWebIdentityResponseFilterSensitiveLog, + AssumeRootCommand: () => AssumeRootCommand, + AssumeRootResponseFilterSensitiveLog: () => AssumeRootResponseFilterSensitiveLog, + ClientInputEndpointParameters: () => import_EndpointParameters10.ClientInputEndpointParameters, + CredentialsFilterSensitiveLog: () => CredentialsFilterSensitiveLog, + DecodeAuthorizationMessageCommand: () => DecodeAuthorizationMessageCommand, + ExpiredTokenException: () => ExpiredTokenException, + GetAccessKeyInfoCommand: () => GetAccessKeyInfoCommand, + GetCallerIdentityCommand: () => GetCallerIdentityCommand, + GetFederationTokenCommand: () => GetFederationTokenCommand, + GetFederationTokenResponseFilterSensitiveLog: () => GetFederationTokenResponseFilterSensitiveLog, + GetSessionTokenCommand: () => GetSessionTokenCommand, + GetSessionTokenResponseFilterSensitiveLog: () => GetSessionTokenResponseFilterSensitiveLog, + IDPCommunicationErrorException: () => IDPCommunicationErrorException, + IDPRejectedClaimException: () => IDPRejectedClaimException, + InvalidAuthorizationMessageException: () => InvalidAuthorizationMessageException, + InvalidIdentityTokenException: () => InvalidIdentityTokenException, + MalformedPolicyDocumentException: () => MalformedPolicyDocumentException, + PackedPolicyTooLargeException: () => PackedPolicyTooLargeException, + RegionDisabledException: () => RegionDisabledException, + STS: () => STS, + STSServiceException: () => STSServiceException, + decorateDefaultCredentialProvider: () => decorateDefaultCredentialProvider, + getDefaultRoleAssumer: () => getDefaultRoleAssumer2, + getDefaultRoleAssumerWithWebIdentity: () => getDefaultRoleAssumerWithWebIdentity2 +}); +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, __nccwpck_require__(1548), module.exports); + +// src/STS.ts + + +// src/commands/AssumeRoleCommand.ts +var import_middleware_endpoint = __nccwpck_require__(99); +var import_middleware_serde = __nccwpck_require__(3255); + +var import_EndpointParameters = __nccwpck_require__(2912); + +// src/models/models_0.ts + + +// src/models/STSServiceException.ts +var import_smithy_client = __nccwpck_require__(1411); +var _STSServiceException = class _STSServiceException extends import_smithy_client.ServiceException { + /** + * @internal + */ + constructor(options) { + super(options); + Object.setPrototypeOf(this, _STSServiceException.prototype); + } +}; +__name(_STSServiceException, "STSServiceException"); +var STSServiceException = 
_STSServiceException; + +// src/models/models_0.ts +var _ExpiredTokenException = class _ExpiredTokenException extends STSServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "ExpiredTokenException", + $fault: "client", + ...opts + }); + this.name = "ExpiredTokenException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _ExpiredTokenException.prototype); + } +}; +__name(_ExpiredTokenException, "ExpiredTokenException"); +var ExpiredTokenException = _ExpiredTokenException; +var _MalformedPolicyDocumentException = class _MalformedPolicyDocumentException extends STSServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "MalformedPolicyDocumentException", + $fault: "client", + ...opts + }); + this.name = "MalformedPolicyDocumentException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _MalformedPolicyDocumentException.prototype); + } +}; +__name(_MalformedPolicyDocumentException, "MalformedPolicyDocumentException"); +var MalformedPolicyDocumentException = _MalformedPolicyDocumentException; +var _PackedPolicyTooLargeException = class _PackedPolicyTooLargeException extends STSServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "PackedPolicyTooLargeException", + $fault: "client", + ...opts + }); + this.name = "PackedPolicyTooLargeException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _PackedPolicyTooLargeException.prototype); + } +}; +__name(_PackedPolicyTooLargeException, "PackedPolicyTooLargeException"); +var PackedPolicyTooLargeException = _PackedPolicyTooLargeException; +var _RegionDisabledException = class _RegionDisabledException extends STSServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "RegionDisabledException", + $fault: "client", + ...opts + }); + this.name = "RegionDisabledException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _RegionDisabledException.prototype); + } +}; +__name(_RegionDisabledException, "RegionDisabledException"); +var RegionDisabledException = _RegionDisabledException; +var _IDPRejectedClaimException = class _IDPRejectedClaimException extends STSServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "IDPRejectedClaimException", + $fault: "client", + ...opts + }); + this.name = "IDPRejectedClaimException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _IDPRejectedClaimException.prototype); + } +}; +__name(_IDPRejectedClaimException, "IDPRejectedClaimException"); +var IDPRejectedClaimException = _IDPRejectedClaimException; +var _InvalidIdentityTokenException = class _InvalidIdentityTokenException extends STSServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidIdentityTokenException", + $fault: "client", + ...opts + }); + this.name = "InvalidIdentityTokenException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidIdentityTokenException.prototype); + } +}; +__name(_InvalidIdentityTokenException, "InvalidIdentityTokenException"); +var InvalidIdentityTokenException = _InvalidIdentityTokenException; +var _IDPCommunicationErrorException = class _IDPCommunicationErrorException extends STSServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "IDPCommunicationErrorException", + $fault: "client", + ...opts + }); + this.name = "IDPCommunicationErrorException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _IDPCommunicationErrorException.prototype); + } +}; 
+__name(_IDPCommunicationErrorException, "IDPCommunicationErrorException"); +var IDPCommunicationErrorException = _IDPCommunicationErrorException; +var _InvalidAuthorizationMessageException = class _InvalidAuthorizationMessageException extends STSServiceException { + /** + * @internal + */ + constructor(opts) { + super({ + name: "InvalidAuthorizationMessageException", + $fault: "client", + ...opts + }); + this.name = "InvalidAuthorizationMessageException"; + this.$fault = "client"; + Object.setPrototypeOf(this, _InvalidAuthorizationMessageException.prototype); + } +}; +__name(_InvalidAuthorizationMessageException, "InvalidAuthorizationMessageException"); +var InvalidAuthorizationMessageException = _InvalidAuthorizationMessageException; +var CredentialsFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SecretAccessKey && { SecretAccessKey: import_smithy_client.SENSITIVE_STRING } +}), "CredentialsFilterSensitiveLog"); +var AssumeRoleResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleResponseFilterSensitiveLog"); +var AssumeRoleWithSAMLRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.SAMLAssertion && { SAMLAssertion: import_smithy_client.SENSITIVE_STRING } +}), "AssumeRoleWithSAMLRequestFilterSensitiveLog"); +var AssumeRoleWithSAMLResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleWithSAMLResponseFilterSensitiveLog"); +var AssumeRoleWithWebIdentityRequestFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.WebIdentityToken && { WebIdentityToken: import_smithy_client.SENSITIVE_STRING } +}), "AssumeRoleWithWebIdentityRequestFilterSensitiveLog"); +var AssumeRoleWithWebIdentityResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRoleWithWebIdentityResponseFilterSensitiveLog"); +var AssumeRootResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "AssumeRootResponseFilterSensitiveLog"); +var GetFederationTokenResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "GetFederationTokenResponseFilterSensitiveLog"); +var GetSessionTokenResponseFilterSensitiveLog = /* @__PURE__ */ __name((obj) => ({ + ...obj, + ...obj.Credentials && { Credentials: CredentialsFilterSensitiveLog(obj.Credentials) } +}), "GetSessionTokenResponseFilterSensitiveLog"); + +// src/protocols/Aws_query.ts +var import_core = __nccwpck_require__(8704); +var import_protocol_http = __nccwpck_require__(2356); + +var se_AssumeRoleCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleRequest(input, context), + [_A]: _AR, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleCommand"); +var se_AssumeRoleWithSAMLCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithSAMLRequest(input, context), + [_A]: _ARWSAML, + 
[_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleWithSAMLCommand"); +var se_AssumeRoleWithWebIdentityCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRoleWithWebIdentityRequest(input, context), + [_A]: _ARWWI, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRoleWithWebIdentityCommand"); +var se_AssumeRootCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_AssumeRootRequest(input, context), + [_A]: _ARs, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_AssumeRootCommand"); +var se_DecodeAuthorizationMessageCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_DecodeAuthorizationMessageRequest(input, context), + [_A]: _DAM, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_DecodeAuthorizationMessageCommand"); +var se_GetAccessKeyInfoCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetAccessKeyInfoRequest(input, context), + [_A]: _GAKI, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetAccessKeyInfoCommand"); +var se_GetCallerIdentityCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetCallerIdentityRequest(input, context), + [_A]: _GCI, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetCallerIdentityCommand"); +var se_GetFederationTokenCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetFederationTokenRequest(input, context), + [_A]: _GFT, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetFederationTokenCommand"); +var se_GetSessionTokenCommand = /* @__PURE__ */ __name(async (input, context) => { + const headers = SHARED_HEADERS; + let body; + body = buildFormUrlencodedString({ + ...se_GetSessionTokenRequest(input, context), + [_A]: _GST, + [_V]: _ + }); + return buildHttpRpcRequest(context, headers, "/", void 0, body); +}, "se_GetSessionTokenCommand"); +var de_AssumeRoleCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRoleResponse(data.AssumeRoleResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleCommand"); +var de_AssumeRoleWithSAMLCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithSAMLResponse(data.AssumeRoleWithSAMLResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleWithSAMLCommand"); +var 
de_AssumeRoleWithWebIdentityCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRoleWithWebIdentityResponse(data.AssumeRoleWithWebIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRoleWithWebIdentityCommand"); +var de_AssumeRootCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_AssumeRootResponse(data.AssumeRootResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_AssumeRootCommand"); +var de_DecodeAuthorizationMessageCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_DecodeAuthorizationMessageResponse(data.DecodeAuthorizationMessageResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_DecodeAuthorizationMessageCommand"); +var de_GetAccessKeyInfoCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_GetAccessKeyInfoResponse(data.GetAccessKeyInfoResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetAccessKeyInfoCommand"); +var de_GetCallerIdentityCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_GetCallerIdentityResponse(data.GetCallerIdentityResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetCallerIdentityCommand"); +var de_GetFederationTokenCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_GetFederationTokenResponse(data.GetFederationTokenResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetFederationTokenCommand"); +var de_GetSessionTokenCommand = /* @__PURE__ */ __name(async (output, context) => { + if (output.statusCode >= 300) { + return de_CommandError(output, context); + } + const data = await (0, import_core.parseXmlBody)(output.body, context); + let contents = {}; + contents = de_GetSessionTokenResponse(data.GetSessionTokenResult, context); + const response = { + $metadata: deserializeMetadata(output), + ...contents + }; + return response; +}, "de_GetSessionTokenCommand"); +var de_CommandError = /* @__PURE__ */ __name(async (output, context) => { + const parsedOutput = { + ...output, + body: await (0, 
import_core.parseXmlErrorBody)(output.body, context) + }; + const errorCode = loadQueryErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ExpiredTokenException": + case "com.amazonaws.sts#ExpiredTokenException": + throw await de_ExpiredTokenExceptionRes(parsedOutput, context); + case "MalformedPolicyDocument": + case "com.amazonaws.sts#MalformedPolicyDocumentException": + throw await de_MalformedPolicyDocumentExceptionRes(parsedOutput, context); + case "PackedPolicyTooLarge": + case "com.amazonaws.sts#PackedPolicyTooLargeException": + throw await de_PackedPolicyTooLargeExceptionRes(parsedOutput, context); + case "RegionDisabledException": + case "com.amazonaws.sts#RegionDisabledException": + throw await de_RegionDisabledExceptionRes(parsedOutput, context); + case "IDPRejectedClaim": + case "com.amazonaws.sts#IDPRejectedClaimException": + throw await de_IDPRejectedClaimExceptionRes(parsedOutput, context); + case "InvalidIdentityToken": + case "com.amazonaws.sts#InvalidIdentityTokenException": + throw await de_InvalidIdentityTokenExceptionRes(parsedOutput, context); + case "IDPCommunicationError": + case "com.amazonaws.sts#IDPCommunicationErrorException": + throw await de_IDPCommunicationErrorExceptionRes(parsedOutput, context); + case "InvalidAuthorizationMessageException": + case "com.amazonaws.sts#InvalidAuthorizationMessageException": + throw await de_InvalidAuthorizationMessageExceptionRes(parsedOutput, context); + default: + const parsedBody = parsedOutput.body; + return throwDefaultError({ + output, + parsedBody: parsedBody.Error, + errorCode + }); + } +}, "de_CommandError"); +var de_ExpiredTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_ExpiredTokenException(body.Error, context); + const exception = new ExpiredTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_ExpiredTokenExceptionRes"); +var de_IDPCommunicationErrorExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPCommunicationErrorException(body.Error, context); + const exception = new IDPCommunicationErrorException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_IDPCommunicationErrorExceptionRes"); +var de_IDPRejectedClaimExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_IDPRejectedClaimException(body.Error, context); + const exception = new IDPRejectedClaimException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_IDPRejectedClaimExceptionRes"); +var de_InvalidAuthorizationMessageExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidAuthorizationMessageException(body.Error, context); + const exception = new InvalidAuthorizationMessageException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidAuthorizationMessageExceptionRes"); +var de_InvalidIdentityTokenExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, 
context) => { + const body = parsedOutput.body; + const deserialized = de_InvalidIdentityTokenException(body.Error, context); + const exception = new InvalidIdentityTokenException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_InvalidIdentityTokenExceptionRes"); +var de_MalformedPolicyDocumentExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_MalformedPolicyDocumentException(body.Error, context); + const exception = new MalformedPolicyDocumentException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_MalformedPolicyDocumentExceptionRes"); +var de_PackedPolicyTooLargeExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_PackedPolicyTooLargeException(body.Error, context); + const exception = new PackedPolicyTooLargeException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_PackedPolicyTooLargeExceptionRes"); +var de_RegionDisabledExceptionRes = /* @__PURE__ */ __name(async (parsedOutput, context) => { + const body = parsedOutput.body; + const deserialized = de_RegionDisabledException(body.Error, context); + const exception = new RegionDisabledException({ + $metadata: deserializeMetadata(parsedOutput), + ...deserialized + }); + return (0, import_smithy_client.decorateServiceException)(exception, body); +}, "de_RegionDisabledExceptionRes"); +var se_AssumeRoleRequest = /* @__PURE__ */ __name((input, context) => { + var _a2, _b, _c, _d; + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (((_a2 = input[_PA]) == null ? void 0 : _a2.length) === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + if (input[_T] != null) { + const memberEntries = se_tagListType(input[_T], context); + if (((_b = input[_T]) == null ? void 0 : _b.length) === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + if (input[_TTK] != null) { + const memberEntries = se_tagKeyListType(input[_TTK], context); + if (((_c = input[_TTK]) == null ? void 0 : _c.length) === 0) { + entries.TransitiveTagKeys = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TransitiveTagKeys.${key}`; + entries[loc] = value; + }); + } + if (input[_EI] != null) { + entries[_EI] = input[_EI]; + } + if (input[_SN] != null) { + entries[_SN] = input[_SN]; + } + if (input[_TC] != null) { + entries[_TC] = input[_TC]; + } + if (input[_SI] != null) { + entries[_SI] = input[_SI]; + } + if (input[_PC] != null) { + const memberEntries = se_ProvidedContextsListType(input[_PC], context); + if (((_d = input[_PC]) == null ? 
void 0 : _d.length) === 0) { + entries.ProvidedContexts = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `ProvidedContexts.${key}`; + entries[loc] = value; + }); + } + return entries; +}, "se_AssumeRoleRequest"); +var se_AssumeRoleWithSAMLRequest = /* @__PURE__ */ __name((input, context) => { + var _a2; + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_PAr] != null) { + entries[_PAr] = input[_PAr]; + } + if (input[_SAMLA] != null) { + entries[_SAMLA] = input[_SAMLA]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (((_a2 = input[_PA]) == null ? void 0 : _a2.length) === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + return entries; +}, "se_AssumeRoleWithSAMLRequest"); +var se_AssumeRoleWithWebIdentityRequest = /* @__PURE__ */ __name((input, context) => { + var _a2; + const entries = {}; + if (input[_RA] != null) { + entries[_RA] = input[_RA]; + } + if (input[_RSN] != null) { + entries[_RSN] = input[_RSN]; + } + if (input[_WIT] != null) { + entries[_WIT] = input[_WIT]; + } + if (input[_PI] != null) { + entries[_PI] = input[_PI]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (((_a2 = input[_PA]) == null ? void 0 : _a2.length) === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + return entries; +}, "se_AssumeRoleWithWebIdentityRequest"); +var se_AssumeRootRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_TP] != null) { + entries[_TP] = input[_TP]; + } + if (input[_TPA] != null) { + const memberEntries = se_PolicyDescriptorType(input[_TPA], context); + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `TaskPolicyArn.${key}`; + entries[loc] = value; + }); + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + return entries; +}, "se_AssumeRootRequest"); +var se_DecodeAuthorizationMessageRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_EM] != null) { + entries[_EM] = input[_EM]; + } + return entries; +}, "se_DecodeAuthorizationMessageRequest"); +var se_GetAccessKeyInfoRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_AKI] != null) { + entries[_AKI] = input[_AKI]; + } + return entries; +}, "se_GetAccessKeyInfoRequest"); +var se_GetCallerIdentityRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + return entries; +}, "se_GetCallerIdentityRequest"); +var se_GetFederationTokenRequest = /* @__PURE__ */ __name((input, context) => { + var _a2, _b; + const entries = {}; + if (input[_N] != null) { + entries[_N] = input[_N]; + } + if (input[_P] != null) { + entries[_P] = input[_P]; + } + if (input[_PA] != null) { + const memberEntries = se_policyDescriptorListType(input[_PA], context); + if (((_a2 = input[_PA]) == null ? 
void 0 : _a2.length) === 0) { + entries.PolicyArns = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `PolicyArns.${key}`; + entries[loc] = value; + }); + } + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + if (input[_T] != null) { + const memberEntries = se_tagListType(input[_T], context); + if (((_b = input[_T]) == null ? void 0 : _b.length) === 0) { + entries.Tags = []; + } + Object.entries(memberEntries).forEach(([key, value]) => { + const loc = `Tags.${key}`; + entries[loc] = value; + }); + } + return entries; +}, "se_GetFederationTokenRequest"); +var se_GetSessionTokenRequest = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_DS] != null) { + entries[_DS] = input[_DS]; + } + if (input[_SN] != null) { + entries[_SN] = input[_SN]; + } + if (input[_TC] != null) { + entries[_TC] = input[_TC]; + } + return entries; +}, "se_GetSessionTokenRequest"); +var se_policyDescriptorListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_PolicyDescriptorType(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_policyDescriptorListType"); +var se_PolicyDescriptorType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_a] != null) { + entries[_a] = input[_a]; + } + return entries; +}, "se_PolicyDescriptorType"); +var se_ProvidedContext = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_PAro] != null) { + entries[_PAro] = input[_PAro]; + } + if (input[_CA] != null) { + entries[_CA] = input[_CA]; + } + return entries; +}, "se_ProvidedContext"); +var se_ProvidedContextsListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_ProvidedContext(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_ProvidedContextsListType"); +var se_Tag = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + if (input[_K] != null) { + entries[_K] = input[_K]; + } + if (input[_Va] != null) { + entries[_Va] = input[_Va]; + } + return entries; +}, "se_Tag"); +var se_tagKeyListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + entries[`member.${counter}`] = entry; + counter++; + } + return entries; +}, "se_tagKeyListType"); +var se_tagListType = /* @__PURE__ */ __name((input, context) => { + const entries = {}; + let counter = 1; + for (const entry of input) { + if (entry === null) { + continue; + } + const memberEntries = se_Tag(entry, context); + Object.entries(memberEntries).forEach(([key, value]) => { + entries[`member.${counter}.${key}`] = value; + }); + counter++; + } + return entries; +}, "se_tagListType"); +var de_AssumedRoleUser = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_ARI] != null) { + contents[_ARI] = (0, import_smithy_client.expectString)(output[_ARI]); + } + if (output[_Ar] != null) { + contents[_Ar] = (0, import_smithy_client.expectString)(output[_Ar]); + } + return contents; +}, "de_AssumedRoleUser"); 
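+// Descriptive note on the generated code below: the de_* helpers deserialize parsed STS Query-protocol XML results (e.g. AssumeRoleResult) into the SDK response shapes, mirroring the se_* request serializers above.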
+var de_AssumeRoleResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client.strictParseInt32)(output[_PPS]); + } + if (output[_SI] != null) { + contents[_SI] = (0, import_smithy_client.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleResponse"); +var de_AssumeRoleWithSAMLResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client.strictParseInt32)(output[_PPS]); + } + if (output[_S] != null) { + contents[_S] = (0, import_smithy_client.expectString)(output[_S]); + } + if (output[_ST] != null) { + contents[_ST] = (0, import_smithy_client.expectString)(output[_ST]); + } + if (output[_I] != null) { + contents[_I] = (0, import_smithy_client.expectString)(output[_I]); + } + if (output[_Au] != null) { + contents[_Au] = (0, import_smithy_client.expectString)(output[_Au]); + } + if (output[_NQ] != null) { + contents[_NQ] = (0, import_smithy_client.expectString)(output[_NQ]); + } + if (output[_SI] != null) { + contents[_SI] = (0, import_smithy_client.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleWithSAMLResponse"); +var de_AssumeRoleWithWebIdentityResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_SFWIT] != null) { + contents[_SFWIT] = (0, import_smithy_client.expectString)(output[_SFWIT]); + } + if (output[_ARU] != null) { + contents[_ARU] = de_AssumedRoleUser(output[_ARU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client.strictParseInt32)(output[_PPS]); + } + if (output[_Pr] != null) { + contents[_Pr] = (0, import_smithy_client.expectString)(output[_Pr]); + } + if (output[_Au] != null) { + contents[_Au] = (0, import_smithy_client.expectString)(output[_Au]); + } + if (output[_SI] != null) { + contents[_SI] = (0, import_smithy_client.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRoleWithWebIdentityResponse"); +var de_AssumeRootResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_SI] != null) { + contents[_SI] = (0, import_smithy_client.expectString)(output[_SI]); + } + return contents; +}, "de_AssumeRootResponse"); +var de_Credentials = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_AKI] != null) { + contents[_AKI] = (0, import_smithy_client.expectString)(output[_AKI]); + } + if (output[_SAK] != null) { + contents[_SAK] = (0, import_smithy_client.expectString)(output[_SAK]); + } + if (output[_STe] != null) { + contents[_STe] = (0, import_smithy_client.expectString)(output[_STe]); + } + if (output[_E] != null) { + contents[_E] = (0, import_smithy_client.expectNonNull)((0, import_smithy_client.parseRfc3339DateTimeWithOffset)(output[_E])); + } + return contents; +}, "de_Credentials"); +var de_DecodeAuthorizationMessageResponse = /* @__PURE__ */ __name((output, context) => { 
+ const contents = {}; + if (output[_DM] != null) { + contents[_DM] = (0, import_smithy_client.expectString)(output[_DM]); + } + return contents; +}, "de_DecodeAuthorizationMessageResponse"); +var de_ExpiredTokenException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client.expectString)(output[_m]); + } + return contents; +}, "de_ExpiredTokenException"); +var de_FederatedUser = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_FUI] != null) { + contents[_FUI] = (0, import_smithy_client.expectString)(output[_FUI]); + } + if (output[_Ar] != null) { + contents[_Ar] = (0, import_smithy_client.expectString)(output[_Ar]); + } + return contents; +}, "de_FederatedUser"); +var de_GetAccessKeyInfoResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_Ac] != null) { + contents[_Ac] = (0, import_smithy_client.expectString)(output[_Ac]); + } + return contents; +}, "de_GetAccessKeyInfoResponse"); +var de_GetCallerIdentityResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_UI] != null) { + contents[_UI] = (0, import_smithy_client.expectString)(output[_UI]); + } + if (output[_Ac] != null) { + contents[_Ac] = (0, import_smithy_client.expectString)(output[_Ac]); + } + if (output[_Ar] != null) { + contents[_Ar] = (0, import_smithy_client.expectString)(output[_Ar]); + } + return contents; +}, "de_GetCallerIdentityResponse"); +var de_GetFederationTokenResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + if (output[_FU] != null) { + contents[_FU] = de_FederatedUser(output[_FU], context); + } + if (output[_PPS] != null) { + contents[_PPS] = (0, import_smithy_client.strictParseInt32)(output[_PPS]); + } + return contents; +}, "de_GetFederationTokenResponse"); +var de_GetSessionTokenResponse = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_C] != null) { + contents[_C] = de_Credentials(output[_C], context); + } + return contents; +}, "de_GetSessionTokenResponse"); +var de_IDPCommunicationErrorException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client.expectString)(output[_m]); + } + return contents; +}, "de_IDPCommunicationErrorException"); +var de_IDPRejectedClaimException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client.expectString)(output[_m]); + } + return contents; +}, "de_IDPRejectedClaimException"); +var de_InvalidAuthorizationMessageException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client.expectString)(output[_m]); + } + return contents; +}, "de_InvalidAuthorizationMessageException"); +var de_InvalidIdentityTokenException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client.expectString)(output[_m]); + } + return contents; +}, "de_InvalidIdentityTokenException"); +var de_MalformedPolicyDocumentException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client.expectString)(output[_m]); + } + return contents; +}, 
"de_MalformedPolicyDocumentException"); +var de_PackedPolicyTooLargeException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client.expectString)(output[_m]); + } + return contents; +}, "de_PackedPolicyTooLargeException"); +var de_RegionDisabledException = /* @__PURE__ */ __name((output, context) => { + const contents = {}; + if (output[_m] != null) { + contents[_m] = (0, import_smithy_client.expectString)(output[_m]); + } + return contents; +}, "de_RegionDisabledException"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); +var throwDefaultError = (0, import_smithy_client.withBaseException)(STSServiceException); +var buildHttpRpcRequest = /* @__PURE__ */ __name(async (context, headers, path, resolvedHostname, body) => { + const { hostname, protocol = "https", port, path: basePath } = await context.endpoint(); + const contents = { + protocol, + hostname, + port, + method: "POST", + path: basePath.endsWith("/") ? basePath.slice(0, -1) + path : basePath + path, + headers + }; + if (resolvedHostname !== void 0) { + contents.hostname = resolvedHostname; + } + if (body !== void 0) { + contents.body = body; + } + return new import_protocol_http.HttpRequest(contents); +}, "buildHttpRpcRequest"); +var SHARED_HEADERS = { + "content-type": "application/x-www-form-urlencoded" +}; +var _ = "2011-06-15"; +var _A = "Action"; +var _AKI = "AccessKeyId"; +var _AR = "AssumeRole"; +var _ARI = "AssumedRoleId"; +var _ARU = "AssumedRoleUser"; +var _ARWSAML = "AssumeRoleWithSAML"; +var _ARWWI = "AssumeRoleWithWebIdentity"; +var _ARs = "AssumeRoot"; +var _Ac = "Account"; +var _Ar = "Arn"; +var _Au = "Audience"; +var _C = "Credentials"; +var _CA = "ContextAssertion"; +var _DAM = "DecodeAuthorizationMessage"; +var _DM = "DecodedMessage"; +var _DS = "DurationSeconds"; +var _E = "Expiration"; +var _EI = "ExternalId"; +var _EM = "EncodedMessage"; +var _FU = "FederatedUser"; +var _FUI = "FederatedUserId"; +var _GAKI = "GetAccessKeyInfo"; +var _GCI = "GetCallerIdentity"; +var _GFT = "GetFederationToken"; +var _GST = "GetSessionToken"; +var _I = "Issuer"; +var _K = "Key"; +var _N = "Name"; +var _NQ = "NameQualifier"; +var _P = "Policy"; +var _PA = "PolicyArns"; +var _PAr = "PrincipalArn"; +var _PAro = "ProviderArn"; +var _PC = "ProvidedContexts"; +var _PI = "ProviderId"; +var _PPS = "PackedPolicySize"; +var _Pr = "Provider"; +var _RA = "RoleArn"; +var _RSN = "RoleSessionName"; +var _S = "Subject"; +var _SAK = "SecretAccessKey"; +var _SAMLA = "SAMLAssertion"; +var _SFWIT = "SubjectFromWebIdentityToken"; +var _SI = "SourceIdentity"; +var _SN = "SerialNumber"; +var _ST = "SubjectType"; +var _STe = "SessionToken"; +var _T = "Tags"; +var _TC = "TokenCode"; +var _TP = "TargetPrincipal"; +var _TPA = "TaskPolicyArn"; +var _TTK = "TransitiveTagKeys"; +var _UI = "UserId"; +var _V = "Version"; +var _Va = "Value"; +var _WIT = "WebIdentityToken"; +var _a = "arn"; +var _m = "message"; +var buildFormUrlencodedString = /* @__PURE__ */ __name((formEntries) => Object.entries(formEntries).map(([key, value]) => (0, import_smithy_client.extendedEncodeURIComponent)(key) + "=" + (0, import_smithy_client.extendedEncodeURIComponent)(value)).join("&"), 
"buildFormUrlencodedString"); +var loadQueryErrorCode = /* @__PURE__ */ __name((output, data) => { + var _a2; + if (((_a2 = data.Error) == null ? void 0 : _a2.Code) !== void 0) { + return data.Error.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}, "loadQueryErrorCode"); + +// src/commands/AssumeRoleCommand.ts +var _AssumeRoleCommand = class _AssumeRoleCommand extends import_smithy_client.Command.classBuilder().ep(import_EndpointParameters.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRole", {}).n("STSClient", "AssumeRoleCommand").f(void 0, AssumeRoleResponseFilterSensitiveLog).ser(se_AssumeRoleCommand).de(de_AssumeRoleCommand).build() { +}; +__name(_AssumeRoleCommand, "AssumeRoleCommand"); +var AssumeRoleCommand = _AssumeRoleCommand; + +// src/commands/AssumeRoleWithSAMLCommand.ts + + + +var import_EndpointParameters2 = __nccwpck_require__(2912); +var _AssumeRoleWithSAMLCommand = class _AssumeRoleWithSAMLCommand extends import_smithy_client.Command.classBuilder().ep(import_EndpointParameters2.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRoleWithSAML", {}).n("STSClient", "AssumeRoleWithSAMLCommand").f(AssumeRoleWithSAMLRequestFilterSensitiveLog, AssumeRoleWithSAMLResponseFilterSensitiveLog).ser(se_AssumeRoleWithSAMLCommand).de(de_AssumeRoleWithSAMLCommand).build() { +}; +__name(_AssumeRoleWithSAMLCommand, "AssumeRoleWithSAMLCommand"); +var AssumeRoleWithSAMLCommand = _AssumeRoleWithSAMLCommand; + +// src/commands/AssumeRoleWithWebIdentityCommand.ts + + + +var import_EndpointParameters3 = __nccwpck_require__(2912); +var _AssumeRoleWithWebIdentityCommand = class _AssumeRoleWithWebIdentityCommand extends import_smithy_client.Command.classBuilder().ep(import_EndpointParameters3.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRoleWithWebIdentity", {}).n("STSClient", "AssumeRoleWithWebIdentityCommand").f(AssumeRoleWithWebIdentityRequestFilterSensitiveLog, AssumeRoleWithWebIdentityResponseFilterSensitiveLog).ser(se_AssumeRoleWithWebIdentityCommand).de(de_AssumeRoleWithWebIdentityCommand).build() { +}; +__name(_AssumeRoleWithWebIdentityCommand, "AssumeRoleWithWebIdentityCommand"); +var AssumeRoleWithWebIdentityCommand = _AssumeRoleWithWebIdentityCommand; + +// src/commands/AssumeRootCommand.ts + + + +var import_EndpointParameters4 = __nccwpck_require__(2912); +var _AssumeRootCommand = class _AssumeRootCommand extends import_smithy_client.Command.classBuilder().ep(import_EndpointParameters4.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "AssumeRoot", 
{}).n("STSClient", "AssumeRootCommand").f(void 0, AssumeRootResponseFilterSensitiveLog).ser(se_AssumeRootCommand).de(de_AssumeRootCommand).build() { +}; +__name(_AssumeRootCommand, "AssumeRootCommand"); +var AssumeRootCommand = _AssumeRootCommand; + +// src/commands/DecodeAuthorizationMessageCommand.ts + + + +var import_EndpointParameters5 = __nccwpck_require__(2912); +var _DecodeAuthorizationMessageCommand = class _DecodeAuthorizationMessageCommand extends import_smithy_client.Command.classBuilder().ep(import_EndpointParameters5.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "DecodeAuthorizationMessage", {}).n("STSClient", "DecodeAuthorizationMessageCommand").f(void 0, void 0).ser(se_DecodeAuthorizationMessageCommand).de(de_DecodeAuthorizationMessageCommand).build() { +}; +__name(_DecodeAuthorizationMessageCommand, "DecodeAuthorizationMessageCommand"); +var DecodeAuthorizationMessageCommand = _DecodeAuthorizationMessageCommand; + +// src/commands/GetAccessKeyInfoCommand.ts + + + +var import_EndpointParameters6 = __nccwpck_require__(2912); +var _GetAccessKeyInfoCommand = class _GetAccessKeyInfoCommand extends import_smithy_client.Command.classBuilder().ep(import_EndpointParameters6.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "GetAccessKeyInfo", {}).n("STSClient", "GetAccessKeyInfoCommand").f(void 0, void 0).ser(se_GetAccessKeyInfoCommand).de(de_GetAccessKeyInfoCommand).build() { +}; +__name(_GetAccessKeyInfoCommand, "GetAccessKeyInfoCommand"); +var GetAccessKeyInfoCommand = _GetAccessKeyInfoCommand; + +// src/commands/GetCallerIdentityCommand.ts + + + +var import_EndpointParameters7 = __nccwpck_require__(2912); +var _GetCallerIdentityCommand = class _GetCallerIdentityCommand extends import_smithy_client.Command.classBuilder().ep(import_EndpointParameters7.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "GetCallerIdentity", {}).n("STSClient", "GetCallerIdentityCommand").f(void 0, void 0).ser(se_GetCallerIdentityCommand).de(de_GetCallerIdentityCommand).build() { +}; +__name(_GetCallerIdentityCommand, "GetCallerIdentityCommand"); +var GetCallerIdentityCommand = _GetCallerIdentityCommand; + +// src/commands/GetFederationTokenCommand.ts + + + +var import_EndpointParameters8 = __nccwpck_require__(2912); +var _GetFederationTokenCommand = class _GetFederationTokenCommand extends import_smithy_client.Command.classBuilder().ep(import_EndpointParameters8.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "GetFederationToken", {}).n("STSClient", "GetFederationTokenCommand").f(void 0, 
GetFederationTokenResponseFilterSensitiveLog).ser(se_GetFederationTokenCommand).de(de_GetFederationTokenCommand).build() { +}; +__name(_GetFederationTokenCommand, "GetFederationTokenCommand"); +var GetFederationTokenCommand = _GetFederationTokenCommand; + +// src/commands/GetSessionTokenCommand.ts + + + +var import_EndpointParameters9 = __nccwpck_require__(2912); +var _GetSessionTokenCommand = class _GetSessionTokenCommand extends import_smithy_client.Command.classBuilder().ep(import_EndpointParameters9.commonParams).m(function(Command, cs, config, o) { + return [ + (0, import_middleware_serde.getSerdePlugin)(config, this.serialize, this.deserialize), + (0, import_middleware_endpoint.getEndpointPlugin)(config, Command.getEndpointParameterInstructions()) + ]; +}).s("AWSSecurityTokenServiceV20110615", "GetSessionToken", {}).n("STSClient", "GetSessionTokenCommand").f(void 0, GetSessionTokenResponseFilterSensitiveLog).ser(se_GetSessionTokenCommand).de(de_GetSessionTokenCommand).build() { +}; +__name(_GetSessionTokenCommand, "GetSessionTokenCommand"); +var GetSessionTokenCommand = _GetSessionTokenCommand; + +// src/STS.ts +var import_STSClient = __nccwpck_require__(1548); +var commands = { + AssumeRoleCommand, + AssumeRoleWithSAMLCommand, + AssumeRoleWithWebIdentityCommand, + AssumeRootCommand, + DecodeAuthorizationMessageCommand, + GetAccessKeyInfoCommand, + GetCallerIdentityCommand, + GetFederationTokenCommand, + GetSessionTokenCommand +}; +var _STS = class _STS extends import_STSClient.STSClient { +}; +__name(_STS, "STS"); +var STS = _STS; +(0, import_smithy_client.createAggregatedClient)(commands, STS); + +// src/index.ts +var import_EndpointParameters10 = __nccwpck_require__(2912); + +// src/defaultStsRoleAssumers.ts +var import_client = __nccwpck_require__(5152); +var ASSUME_ROLE_DEFAULT_REGION = "us-east-1"; +var getAccountIdFromAssumedRoleUser = /* @__PURE__ */ __name((assumedRoleUser) => { + if (typeof (assumedRoleUser == null ? void 0 : assumedRoleUser.Arn) === "string") { + const arnComponents = assumedRoleUser.Arn.split(":"); + if (arnComponents.length > 4 && arnComponents[4] !== "") { + return arnComponents[4]; + } + } + return void 0; +}, "getAccountIdFromAssumedRoleUser"); +var resolveRegion = /* @__PURE__ */ __name(async (_region, _parentRegion, credentialProviderLogger) => { + var _a2; + const region = typeof _region === "function" ? await _region() : _region; + const parentRegion = typeof _parentRegion === "function" ? await _parentRegion() : _parentRegion; + (_a2 = credentialProviderLogger == null ? void 0 : credentialProviderLogger.debug) == null ? void 0 : _a2.call( + credentialProviderLogger, + "@aws-sdk/client-sts::resolveRegion", + "accepting first of:", + `${region} (provider)`, + `${parentRegion} (parent client)`, + `${ASSUME_ROLE_DEFAULT_REGION} (STS default)` + ); + return region ?? parentRegion ?? ASSUME_ROLE_DEFAULT_REGION; +}, "resolveRegion"); +var getDefaultRoleAssumer = /* @__PURE__ */ __name((stsOptions, stsClientCtor) => { + let stsClient; + let closureSourceCreds; + return async (sourceCreds, params) => { + var _a2, _b, _c; + closureSourceCreds = sourceCreds; + if (!stsClient) { + const { + logger = (_a2 = stsOptions == null ? void 0 : stsOptions.parentClientConfig) == null ? void 0 : _a2.logger, + region, + requestHandler = (_b = stsOptions == null ? void 0 : stsOptions.parentClientConfig) == null ? void 0 : _b.requestHandler, + credentialProviderLogger + } = stsOptions; + const resolvedRegion = await resolveRegion( + region, + (_c = stsOptions == null ? 
void 0 : stsOptions.parentClientConfig) == null ? void 0 : _c.region, + credentialProviderLogger + ); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new stsClientCtor({ + // A hack to make sts client uses the credential in current closure. + credentialDefaultProvider: () => async () => closureSourceCreds, + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? requestHandler : void 0, + logger + }); + } + const { Credentials: Credentials2, AssumedRoleUser: AssumedRoleUser2 } = await stsClient.send(new AssumeRoleCommand(params)); + if (!Credentials2 || !Credentials2.AccessKeyId || !Credentials2.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRole call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser2); + const credentials = { + accessKeyId: Credentials2.AccessKeyId, + secretAccessKey: Credentials2.SecretAccessKey, + sessionToken: Credentials2.SessionToken, + expiration: Credentials2.Expiration, + // TODO(credentialScope): access normally when shape is updated. + ...Credentials2.CredentialScope && { credentialScope: Credentials2.CredentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_STS_ASSUME_ROLE", "i"); + return credentials; + }; +}, "getDefaultRoleAssumer"); +var getDefaultRoleAssumerWithWebIdentity = /* @__PURE__ */ __name((stsOptions, stsClientCtor) => { + let stsClient; + return async (params) => { + var _a2, _b, _c; + if (!stsClient) { + const { + logger = (_a2 = stsOptions == null ? void 0 : stsOptions.parentClientConfig) == null ? void 0 : _a2.logger, + region, + requestHandler = (_b = stsOptions == null ? void 0 : stsOptions.parentClientConfig) == null ? void 0 : _b.requestHandler, + credentialProviderLogger + } = stsOptions; + const resolvedRegion = await resolveRegion( + region, + (_c = stsOptions == null ? void 0 : stsOptions.parentClientConfig) == null ? void 0 : _c.region, + credentialProviderLogger + ); + const isCompatibleRequestHandler = !isH2(requestHandler); + stsClient = new stsClientCtor({ + region: resolvedRegion, + requestHandler: isCompatibleRequestHandler ? requestHandler : void 0, + logger + }); + } + const { Credentials: Credentials2, AssumedRoleUser: AssumedRoleUser2 } = await stsClient.send(new AssumeRoleWithWebIdentityCommand(params)); + if (!Credentials2 || !Credentials2.AccessKeyId || !Credentials2.SecretAccessKey) { + throw new Error(`Invalid response from STS.assumeRoleWithWebIdentity call with role ${params.RoleArn}`); + } + const accountId = getAccountIdFromAssumedRoleUser(AssumedRoleUser2); + const credentials = { + accessKeyId: Credentials2.AccessKeyId, + secretAccessKey: Credentials2.SecretAccessKey, + sessionToken: Credentials2.SessionToken, + expiration: Credentials2.Expiration, + // TODO(credentialScope): access normally when shape is updated. + ...Credentials2.CredentialScope && { credentialScope: Credentials2.CredentialScope }, + ...accountId && { accountId } + }; + if (accountId) { + (0, import_client.setCredentialFeature)(credentials, "RESOLVED_ACCOUNT_ID", "T"); + } + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_STS_ASSUME_ROLE_WEB_ID", "k"); + return credentials; + }; +}, "getDefaultRoleAssumerWithWebIdentity"); +var isH2 = /* @__PURE__ */ __name((requestHandler) => { + var _a2; + return ((_a2 = requestHandler == null ? void 0 : requestHandler.metadata) == null ? 
void 0 : _a2.handlerProtocol) === "h2"; +}, "isH2"); + +// src/defaultRoleAssumers.ts +var import_STSClient2 = __nccwpck_require__(1548); +var getCustomizableStsClientCtor = /* @__PURE__ */ __name((baseCtor, customizations) => { + var _a2; + if (!customizations) + return baseCtor; + else + return _a2 = class extends baseCtor { + constructor(config) { + super(config); + for (const customization of customizations) { + this.middlewareStack.use(customization); + } + } + }, __name(_a2, "CustomizableSTSClient"), _a2; +}, "getCustomizableStsClientCtor"); +var getDefaultRoleAssumer2 = /* @__PURE__ */ __name((stsOptions = {}, stsPlugins) => getDefaultRoleAssumer(stsOptions, getCustomizableStsClientCtor(import_STSClient2.STSClient, stsPlugins)), "getDefaultRoleAssumer"); +var getDefaultRoleAssumerWithWebIdentity2 = /* @__PURE__ */ __name((stsOptions = {}, stsPlugins) => getDefaultRoleAssumerWithWebIdentity(stsOptions, getCustomizableStsClientCtor(import_STSClient2.STSClient, stsPlugins)), "getDefaultRoleAssumerWithWebIdentity"); +var decorateDefaultCredentialProvider = /* @__PURE__ */ __name((provider) => (input) => provider({ + roleAssumer: getDefaultRoleAssumer2(input), + roleAssumerWithWebIdentity: getDefaultRoleAssumerWithWebIdentity2(input), + ...input +}), "decorateDefaultCredentialProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 2769: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const tslib_1 = __nccwpck_require__(1860); +const package_json_1 = tslib_1.__importDefault(__nccwpck_require__(4959)); +const core_1 = __nccwpck_require__(8704); +const credential_provider_node_1 = __nccwpck_require__(5861); +const util_user_agent_node_1 = __nccwpck_require__(1656); +const config_resolver_1 = __nccwpck_require__(9316); +const core_2 = __nccwpck_require__(402); +const hash_node_1 = __nccwpck_require__(5092); +const middleware_retry_1 = __nccwpck_require__(9618); +const node_config_provider_1 = __nccwpck_require__(5704); +const node_http_handler_1 = __nccwpck_require__(1279); +const util_body_length_node_1 = __nccwpck_require__(3638); +const util_retry_1 = __nccwpck_require__(5518); +const runtimeConfig_shared_1 = __nccwpck_require__(3238); +const smithy_client_1 = __nccwpck_require__(1411); +const util_defaults_mode_node_1 = __nccwpck_require__(5435); +const smithy_client_2 = __nccwpck_require__(1411); +const getRuntimeConfig = (config) => { + (0, smithy_client_2.emitWarningIfUnsupportedVersion)(process.version); + const defaultsMode = (0, util_defaults_mode_node_1.resolveDefaultsModeConfig)(config); + const defaultConfigProvider = () => defaultsMode().then(smithy_client_1.loadConfigsForDefaultMode); + const clientSharedValues = (0, runtimeConfig_shared_1.getRuntimeConfig)(config); + (0, core_1.emitWarningIfUnsupportedVersion)(process.version); + const profileConfig = { profile: config?.profile }; + return { + ...clientSharedValues, + ...config, + runtime: "node", + defaultsMode, + bodyLengthChecker: config?.bodyLengthChecker ?? util_body_length_node_1.calculateBodyLength, + credentialDefaultProvider: config?.credentialDefaultProvider ?? credential_provider_node_1.defaultProvider, + defaultUserAgentProvider: config?.defaultUserAgentProvider ?? 
+ (0, util_user_agent_node_1.createDefaultUserAgentProvider)({ serviceId: clientSharedValues.serviceId, clientVersion: package_json_1.default.version }), + httpAuthSchemes: config?.httpAuthSchemes ?? [ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4") || + (async (idProps) => await (0, credential_provider_node_1.defaultProvider)(idProps?.__config || {})()), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + maxAttempts: config?.maxAttempts ?? (0, node_config_provider_1.loadConfig)(middleware_retry_1.NODE_MAX_ATTEMPT_CONFIG_OPTIONS, config), + region: config?.region ?? + (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_REGION_CONFIG_OPTIONS, { ...config_resolver_1.NODE_REGION_CONFIG_FILE_OPTIONS, ...profileConfig }), + requestHandler: node_http_handler_1.NodeHttpHandler.create(config?.requestHandler ?? defaultConfigProvider), + retryMode: config?.retryMode ?? + (0, node_config_provider_1.loadConfig)({ + ...middleware_retry_1.NODE_RETRY_MODE_CONFIG_OPTIONS, + default: async () => (await defaultConfigProvider()).retryMode || util_retry_1.DEFAULT_RETRY_MODE, + }, config), + sha256: config?.sha256 ?? hash_node_1.Hash.bind(null, "sha256"), + streamCollector: config?.streamCollector ?? node_http_handler_1.streamCollector, + useDualstackEndpoint: config?.useDualstackEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, profileConfig), + useFipsEndpoint: config?.useFipsEndpoint ?? (0, node_config_provider_1.loadConfig)(config_resolver_1.NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, profileConfig), + userAgentAppId: config?.userAgentAppId ?? (0, node_config_provider_1.loadConfig)(util_user_agent_node_1.NODE_APP_ID_CONFIG_OPTIONS, profileConfig), + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 3238: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getRuntimeConfig = void 0; +const core_1 = __nccwpck_require__(8704); +const core_2 = __nccwpck_require__(402); +const smithy_client_1 = __nccwpck_require__(1411); +const url_parser_1 = __nccwpck_require__(4494); +const util_base64_1 = __nccwpck_require__(8385); +const util_utf8_1 = __nccwpck_require__(1577); +const httpAuthSchemeProvider_1 = __nccwpck_require__(9232); +const endpointResolver_1 = __nccwpck_require__(1030); +const getRuntimeConfig = (config) => { + return { + apiVersion: "2011-06-15", + base64Decoder: config?.base64Decoder ?? util_base64_1.fromBase64, + base64Encoder: config?.base64Encoder ?? util_base64_1.toBase64, + disableHostPrefix: config?.disableHostPrefix ?? false, + endpointProvider: config?.endpointProvider ?? endpointResolver_1.defaultEndpointResolver, + extensions: config?.extensions ?? [], + httpAuthSchemeProvider: config?.httpAuthSchemeProvider ?? httpAuthSchemeProvider_1.defaultSTSHttpAuthSchemeProvider, + httpAuthSchemes: config?.httpAuthSchemes ?? 
[ + { + schemeId: "aws.auth#sigv4", + identityProvider: (ipc) => ipc.getIdentityProvider("aws.auth#sigv4"), + signer: new core_1.AwsSdkSigV4Signer(), + }, + { + schemeId: "smithy.api#noAuth", + identityProvider: (ipc) => ipc.getIdentityProvider("smithy.api#noAuth") || (async () => ({})), + signer: new core_2.NoAuthSigner(), + }, + ], + logger: config?.logger ?? new smithy_client_1.NoOpLogger(), + serviceId: config?.serviceId ?? "STS", + urlParser: config?.urlParser ?? url_parser_1.parseUrl, + utf8Decoder: config?.utf8Decoder ?? util_utf8_1.fromUtf8, + utf8Encoder: config?.utf8Encoder ?? util_utf8_1.toUtf8, + }; +}; +exports.getRuntimeConfig = getRuntimeConfig; + + +/***/ }), + +/***/ 309: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveRuntimeExtensions = void 0; +const region_config_resolver_1 = __nccwpck_require__(6463); +const protocol_http_1 = __nccwpck_require__(2356); +const smithy_client_1 = __nccwpck_require__(1411); +const httpAuthExtensionConfiguration_1 = __nccwpck_require__(9915); +const asPartial = (t) => t; +const resolveRuntimeExtensions = (runtimeConfig, extensions) => { + const extensionConfiguration = { + ...asPartial((0, region_config_resolver_1.getAwsRegionExtensionConfiguration)(runtimeConfig)), + ...asPartial((0, smithy_client_1.getDefaultExtensionConfiguration)(runtimeConfig)), + ...asPartial((0, protocol_http_1.getHttpHandlerExtensionConfiguration)(runtimeConfig)), + ...asPartial((0, httpAuthExtensionConfiguration_1.getHttpAuthExtensionConfiguration)(runtimeConfig)), + }; + extensions.forEach((extension) => extension.configure(extensionConfiguration)); + return { + ...runtimeConfig, + ...(0, region_config_resolver_1.resolveAwsRegionExtensionConfiguration)(extensionConfiguration), + ...(0, smithy_client_1.resolveDefaultRuntimeConfig)(extensionConfiguration), + ...(0, protocol_http_1.resolveHttpHandlerRuntimeConfig)(extensionConfiguration), + ...(0, httpAuthExtensionConfiguration_1.resolveHttpAuthRuntimeConfig)(extensionConfiguration), + }; +}; +exports.resolveRuntimeExtensions = resolveRuntimeExtensions; + + +/***/ }), + +/***/ 8704: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const tslib_1 = __nccwpck_require__(1860); +tslib_1.__exportStar(__nccwpck_require__(5152), exports); +tslib_1.__exportStar(__nccwpck_require__(7523), exports); +tslib_1.__exportStar(__nccwpck_require__(7288), exports); + + +/***/ }), + +/***/ 5152: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// 
src/submodules/client/index.ts +var client_exports = {}; +__export(client_exports, { + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + setCredentialFeature: () => setCredentialFeature, + setFeature: () => setFeature, + state: () => state +}); +module.exports = __toCommonJS(client_exports); + +// src/submodules/client/emitWarningIfUnsupportedVersion.ts +var state = { + warningEmitted: false +}; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !state.warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 18) { + state.warningEmitted = true; + process.emitWarning( + `NodeDeprecationWarning: The AWS SDK for JavaScript (v3) will +no longer support Node.js 16.x on January 6, 2025. + +To continue receiving updates to AWS services, bug fixes, and security +updates please upgrade to a supported Node.js LTS version. + +More information can be found at: https://a.co/74kJMmI` + ); + } +}, "emitWarningIfUnsupportedVersion"); + +// src/submodules/client/setCredentialFeature.ts +function setCredentialFeature(credentials, feature, value) { + if (!credentials.$source) { + credentials.$source = {}; + } + credentials.$source[feature] = value; + return credentials; +} +__name(setCredentialFeature, "setCredentialFeature"); + +// src/submodules/client/setFeature.ts +function setFeature(context, feature, value) { + if (!context.__aws_sdk_context) { + context.__aws_sdk_context = { + features: {} + }; + } else if (!context.__aws_sdk_context.features) { + context.__aws_sdk_context.features = {}; + } + context.__aws_sdk_context.features[feature] = value; +} +__name(setFeature, "setFeature"); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 7523: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/httpAuthSchemes/index.ts +var httpAuthSchemes_exports = {}; +__export(httpAuthSchemes_exports, { + AWSSDKSigV4Signer: () => AWSSDKSigV4Signer, + AwsSdkSigV4ASigner: () => AwsSdkSigV4ASigner, + AwsSdkSigV4Signer: () => AwsSdkSigV4Signer, + NODE_SIGV4A_CONFIG_OPTIONS: () => NODE_SIGV4A_CONFIG_OPTIONS, + resolveAWSSDKSigV4Config: () => resolveAWSSDKSigV4Config, + resolveAwsSdkSigV4AConfig: () => resolveAwsSdkSigV4AConfig, + resolveAwsSdkSigV4Config: () => resolveAwsSdkSigV4Config, + validateSigningProperties: () => validateSigningProperties +}); +module.exports = __toCommonJS(httpAuthSchemes_exports); + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.ts +var import_protocol_http2 = __nccwpck_require__(2356); + +// src/submodules/httpAuthSchemes/utils/getDateHeader.ts 
+var import_protocol_http = __nccwpck_require__(2356); +var getDateHeader = /* @__PURE__ */ __name((response) => { + var _a, _b; + return import_protocol_http.HttpResponse.isInstance(response) ? ((_a = response.headers) == null ? void 0 : _a.date) ?? ((_b = response.headers) == null ? void 0 : _b.Date) : void 0; +}, "getDateHeader"); + +// src/submodules/httpAuthSchemes/utils/getSkewCorrectedDate.ts +var getSkewCorrectedDate = /* @__PURE__ */ __name((systemClockOffset) => new Date(Date.now() + systemClockOffset), "getSkewCorrectedDate"); + +// src/submodules/httpAuthSchemes/utils/isClockSkewed.ts +var isClockSkewed = /* @__PURE__ */ __name((clockTime, systemClockOffset) => Math.abs(getSkewCorrectedDate(systemClockOffset).getTime() - clockTime) >= 3e5, "isClockSkewed"); + +// src/submodules/httpAuthSchemes/utils/getUpdatedSystemClockOffset.ts +var getUpdatedSystemClockOffset = /* @__PURE__ */ __name((clockTime, currentSystemClockOffset) => { + const clockTimeInMs = Date.parse(clockTime); + if (isClockSkewed(clockTimeInMs, currentSystemClockOffset)) { + return clockTimeInMs - Date.now(); + } + return currentSystemClockOffset; +}, "getUpdatedSystemClockOffset"); + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4Signer.ts +var throwSigningPropertyError = /* @__PURE__ */ __name((name, property) => { + if (!property) { + throw new Error(`Property \`${name}\` is not resolved for AWS SDK SigV4Auth`); + } + return property; +}, "throwSigningPropertyError"); +var validateSigningProperties = /* @__PURE__ */ __name(async (signingProperties) => { + var _a, _b, _c; + const context = throwSigningPropertyError( + "context", + signingProperties.context + ); + const config = throwSigningPropertyError("config", signingProperties.config); + const authScheme = (_c = (_b = (_a = context.endpointV2) == null ? void 0 : _a.properties) == null ? void 0 : _b.authSchemes) == null ? void 0 : _c[0]; + const signerFunction = throwSigningPropertyError( + "signer", + config.signer + ); + const signer = await signerFunction(authScheme); + const signingRegion = signingProperties == null ? void 0 : signingProperties.signingRegion; + const signingRegionSet = signingProperties == null ? void 0 : signingProperties.signingRegionSet; + const signingName = signingProperties == null ? void 0 : signingProperties.signingName; + return { + config, + signer, + signingRegion, + signingRegionSet, + signingName + }; +}, "validateSigningProperties"); +var _AwsSdkSigV4Signer = class _AwsSdkSigV4Signer { + async sign(httpRequest, identity, signingProperties) { + var _a; + if (!import_protocol_http2.HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const validatedProps = await validateSigningProperties(signingProperties); + const { config, signer } = validatedProps; + let { signingRegion, signingName } = validatedProps; + const handlerExecutionContext = signingProperties.context; + if (((_a = handlerExecutionContext == null ? void 0 : handlerExecutionContext.authSchemes) == null ? void 0 : _a.length) ?? 0 > 1) { + const [first, second] = handlerExecutionContext.authSchemes; + if ((first == null ? void 0 : first.name) === "sigv4a" && (second == null ? void 0 : second.name) === "sigv4") { + signingRegion = (second == null ? void 0 : second.signingRegion) ?? signingRegion; + signingName = (second == null ? void 0 : second.signingName) ?? 
signingName; + } + } + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion, + signingService: signingName + }); + return signedRequest; + } + errorHandler(signingProperties) { + return (error) => { + const serverTime = error.ServerTime ?? getDateHeader(error.$response); + if (serverTime) { + const config = throwSigningPropertyError("config", signingProperties.config); + const initialSystemClockOffset = config.systemClockOffset; + config.systemClockOffset = getUpdatedSystemClockOffset(serverTime, config.systemClockOffset); + const clockSkewCorrected = config.systemClockOffset !== initialSystemClockOffset; + if (clockSkewCorrected && error.$metadata) { + error.$metadata.clockSkewCorrected = true; + } + } + throw error; + }; + } + successHandler(httpResponse, signingProperties) { + const dateHeader = getDateHeader(httpResponse); + if (dateHeader) { + const config = throwSigningPropertyError("config", signingProperties.config); + config.systemClockOffset = getUpdatedSystemClockOffset(dateHeader, config.systemClockOffset); + } + } +}; +__name(_AwsSdkSigV4Signer, "AwsSdkSigV4Signer"); +var AwsSdkSigV4Signer = _AwsSdkSigV4Signer; +var AWSSDKSigV4Signer = AwsSdkSigV4Signer; + +// src/submodules/httpAuthSchemes/aws_sdk/AwsSdkSigV4ASigner.ts +var import_protocol_http3 = __nccwpck_require__(2356); +var _AwsSdkSigV4ASigner = class _AwsSdkSigV4ASigner extends AwsSdkSigV4Signer { + async sign(httpRequest, identity, signingProperties) { + var _a; + if (!import_protocol_http3.HttpRequest.isInstance(httpRequest)) { + throw new Error("The request is not an instance of `HttpRequest` and cannot be signed"); + } + const { config, signer, signingRegion, signingRegionSet, signingName } = await validateSigningProperties( + signingProperties + ); + const configResolvedSigningRegionSet = await ((_a = config.sigv4aSigningRegionSet) == null ? void 0 : _a.call(config)); + const multiRegionOverride = (configResolvedSigningRegionSet ?? signingRegionSet ?? [signingRegion]).join(","); + const signedRequest = await signer.sign(httpRequest, { + signingDate: getSkewCorrectedDate(config.systemClockOffset), + signingRegion: multiRegionOverride, + signingService: signingName + }); + return signedRequest; + } +}; +__name(_AwsSdkSigV4ASigner, "AwsSdkSigV4ASigner"); +var AwsSdkSigV4ASigner = _AwsSdkSigV4ASigner; + +// src/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4AConfig.ts +var import_core = __nccwpck_require__(402); +var import_property_provider = __nccwpck_require__(1238); +var resolveAwsSdkSigV4AConfig = /* @__PURE__ */ __name((config) => { + config.sigv4aSigningRegionSet = (0, import_core.normalizeProvider)(config.sigv4aSigningRegionSet); + return config; +}, "resolveAwsSdkSigV4AConfig"); +var NODE_SIGV4A_CONFIG_OPTIONS = { + environmentVariableSelector(env) { + if (env.AWS_SIGV4A_SIGNING_REGION_SET) { + return env.AWS_SIGV4A_SIGNING_REGION_SET.split(",").map((_) => _.trim()); + } + throw new import_property_provider.ProviderError("AWS_SIGV4A_SIGNING_REGION_SET not set in env.", { + tryNextLink: true + }); + }, + configFileSelector(profile) { + if (profile.sigv4a_signing_region_set) { + return (profile.sigv4a_signing_region_set ?? 
"").split(",").map((_) => _.trim()); + } + throw new import_property_provider.ProviderError("sigv4a_signing_region_set not set in profile.", { + tryNextLink: true + }); + }, + default: void 0 +}; + +// src/submodules/httpAuthSchemes/aws_sdk/resolveAwsSdkSigV4Config.ts +var import_client = __nccwpck_require__(5152); +var import_core2 = __nccwpck_require__(402); +var import_signature_v4 = __nccwpck_require__(5118); +var resolveAwsSdkSigV4Config = /* @__PURE__ */ __name((config) => { + let isUserSupplied = false; + let credentialsProvider; + if (config.credentials) { + isUserSupplied = true; + credentialsProvider = (0, import_core2.memoizeIdentityProvider)(config.credentials, import_core2.isIdentityExpired, import_core2.doesIdentityRequireRefresh); + } + if (!credentialsProvider) { + if (config.credentialDefaultProvider) { + credentialsProvider = (0, import_core2.normalizeProvider)( + config.credentialDefaultProvider( + Object.assign({}, config, { + parentClientConfig: config + }) + ) + ); + } else { + credentialsProvider = /* @__PURE__ */ __name(async () => { + throw new Error("`credentials` is missing"); + }, "credentialsProvider"); + } + } + const boundCredentialsProvider = /* @__PURE__ */ __name(async () => credentialsProvider({ callerClientConfig: config }), "boundCredentialsProvider"); + const { + // Default for signingEscapePath + signingEscapePath = true, + // Default for systemClockOffset + systemClockOffset = config.systemClockOffset || 0, + // No default for sha256 since it is platform dependent + sha256 + } = config; + let signer; + if (config.signer) { + signer = (0, import_core2.normalizeProvider)(config.signer); + } else if (config.regionInfoProvider) { + signer = /* @__PURE__ */ __name(() => (0, import_core2.normalizeProvider)(config.region)().then( + async (region) => [ + await config.regionInfoProvider(region, { + useFipsEndpoint: await config.useFipsEndpoint(), + useDualstackEndpoint: await config.useDualstackEndpoint() + }) || {}, + region + ] + ).then(([regionInfo, region]) => { + const { signingRegion, signingService } = regionInfo; + config.signingRegion = config.signingRegion || signingRegion || region; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: boundCredentialsProvider, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath + }; + const SignerCtor = config.signerConstructor || import_signature_v4.SignatureV4; + return new SignerCtor(params); + }), "signer"); + } else { + signer = /* @__PURE__ */ __name(async (authScheme) => { + authScheme = Object.assign( + {}, + { + name: "sigv4", + signingName: config.signingName || config.defaultSigningName, + signingRegion: await (0, import_core2.normalizeProvider)(config.region)(), + properties: {} + }, + authScheme + ); + const signingRegion = authScheme.signingRegion; + const signingService = authScheme.signingName; + config.signingRegion = config.signingRegion || signingRegion; + config.signingName = config.signingName || signingService || config.serviceId; + const params = { + ...config, + credentials: boundCredentialsProvider, + region: config.signingRegion, + service: config.signingName, + sha256, + uriEscapePath: signingEscapePath + }; + const SignerCtor = config.signerConstructor || import_signature_v4.SignatureV4; + return new SignerCtor(params); + }, "signer"); + } + return { + ...config, + systemClockOffset, + signingEscapePath, + credentials: isUserSupplied ? 
async () => boundCredentialsProvider().then( + (creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_CODE", "e") + ) : boundCredentialsProvider, + signer + }; +}, "resolveAwsSdkSigV4Config"); +var resolveAWSSDKSigV4Config = resolveAwsSdkSigV4Config; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 7288: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/protocols/index.ts +var protocols_exports = {}; +__export(protocols_exports, { + _toBool: () => _toBool, + _toNum: () => _toNum, + _toStr: () => _toStr, + awsExpectUnion: () => awsExpectUnion, + loadRestJsonErrorCode: () => loadRestJsonErrorCode, + loadRestXmlErrorCode: () => loadRestXmlErrorCode, + parseJsonBody: () => parseJsonBody, + parseJsonErrorBody: () => parseJsonErrorBody, + parseXmlBody: () => parseXmlBody, + parseXmlErrorBody: () => parseXmlErrorBody +}); +module.exports = __toCommonJS(protocols_exports); + +// src/submodules/protocols/coercing-serializers.ts +var _toStr = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "number" || typeof val === "bigint") { + const warning = new Error(`Received number ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + if (typeof val === "boolean") { + const warning = new Error(`Received boolean ${val} where a string was expected.`); + warning.name = "Warning"; + console.warn(warning); + return String(val); + } + return val; +}, "_toStr"); +var _toBool = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "number") { + } + if (typeof val === "string") { + const lowercase = val.toLowerCase(); + if (val !== "" && lowercase !== "false" && lowercase !== "true") { + const warning = new Error(`Received string "${val}" where a boolean was expected.`); + warning.name = "Warning"; + console.warn(warning); + } + return val !== "" && lowercase !== "false"; + } + return val; +}, "_toBool"); +var _toNum = /* @__PURE__ */ __name((val) => { + if (val == null) { + return val; + } + if (typeof val === "boolean") { + } + if (typeof val === "string") { + const num = Number(val); + if (num.toString() !== val) { + const warning = new Error(`Received string "${val}" where a number was expected.`); + warning.name = "Warning"; + console.warn(warning); + return val; + } + return num; + } + return val; +}, "_toNum"); + +// src/submodules/protocols/json/awsExpectUnion.ts +var import_smithy_client = __nccwpck_require__(1411); +var awsExpectUnion = /* @__PURE__ */ __name((value) => { + if (value 
== null) { + return void 0; + } + if (typeof value === "object" && "__type" in value) { + delete value.__type; + } + return (0, import_smithy_client.expectUnion)(value); +}, "awsExpectUnion"); + +// src/submodules/protocols/common.ts +var import_smithy_client2 = __nccwpck_require__(1411); +var collectBodyString = /* @__PURE__ */ __name((streamBody, context) => (0, import_smithy_client2.collectBody)(streamBody, context).then((body) => context.utf8Encoder(body)), "collectBodyString"); + +// src/submodules/protocols/json/parseJsonBody.ts +var parseJsonBody = /* @__PURE__ */ __name((streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + try { + return JSON.parse(encoded); + } catch (e) { + if ((e == null ? void 0 : e.name) === "SyntaxError") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded + }); + } + throw e; + } + } + return {}; +}), "parseJsonBody"); +var parseJsonErrorBody = /* @__PURE__ */ __name(async (errorBody, context) => { + const value = await parseJsonBody(errorBody, context); + value.message = value.message ?? value.Message; + return value; +}, "parseJsonErrorBody"); +var loadRestJsonErrorCode = /* @__PURE__ */ __name((output, data) => { + const findKey = /* @__PURE__ */ __name((object, key) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()), "findKey"); + const sanitizeErrorCode = /* @__PURE__ */ __name((rawValue) => { + let cleanValue = rawValue; + if (typeof cleanValue === "number") { + cleanValue = cleanValue.toString(); + } + if (cleanValue.indexOf(",") >= 0) { + cleanValue = cleanValue.split(",")[0]; + } + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }, "sanitizeErrorCode"); + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== void 0) { + return sanitizeErrorCode(output.headers[headerKey]); + } + if (data.code !== void 0) { + return sanitizeErrorCode(data.code); + } + if (data["__type"] !== void 0) { + return sanitizeErrorCode(data["__type"]); + } +}, "loadRestJsonErrorCode"); + +// src/submodules/protocols/xml/parseXmlBody.ts +var import_smithy_client3 = __nccwpck_require__(1411); +var import_fast_xml_parser = __nccwpck_require__(9741); +var parseXmlBody = /* @__PURE__ */ __name((streamBody, context) => collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + const parser = new import_fast_xml_parser.XMLParser({ + attributeNamePrefix: "", + htmlEntities: true, + ignoreAttributes: false, + ignoreDeclaration: true, + parseTagValue: false, + trimValues: false, + tagValueProcessor: (_, val) => val.trim() === "" && val.includes("\n") ? 
"" : void 0 + }); + parser.addEntity("#xD", "\r"); + parser.addEntity("#10", "\n"); + let parsedObj; + try { + parsedObj = parser.parse(encoded, true); + } catch (e) { + if (e && typeof e === "object") { + Object.defineProperty(e, "$responseBodyText", { + value: encoded + }); + } + throw e; + } + const textNodeName = "#text"; + const key = Object.keys(parsedObj)[0]; + const parsedObjToReturn = parsedObj[key]; + if (parsedObjToReturn[textNodeName]) { + parsedObjToReturn[key] = parsedObjToReturn[textNodeName]; + delete parsedObjToReturn[textNodeName]; + } + return (0, import_smithy_client3.getValueFromTextNode)(parsedObjToReturn); + } + return {}; +}), "parseXmlBody"); +var parseXmlErrorBody = /* @__PURE__ */ __name(async (errorBody, context) => { + const value = await parseXmlBody(errorBody, context); + if (value.Error) { + value.Error.message = value.Error.message ?? value.Error.Message; + } + return value; +}, "parseXmlErrorBody"); +var loadRestXmlErrorCode = /* @__PURE__ */ __name((output, data) => { + var _a; + if (((_a = data == null ? void 0 : data.Error) == null ? void 0 : _a.Code) !== void 0) { + return data.Error.Code; + } + if ((data == null ? void 0 : data.Code) !== void 0) { + return data.Code; + } + if (output.statusCode == 404) { + return "NotFound"; + } +}, "loadRestXmlErrorCode"); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 5606: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + ENV_ACCOUNT_ID: () => ENV_ACCOUNT_ID, + ENV_CREDENTIAL_SCOPE: () => ENV_CREDENTIAL_SCOPE, + ENV_EXPIRATION: () => ENV_EXPIRATION, + ENV_KEY: () => ENV_KEY, + ENV_SECRET: () => ENV_SECRET, + ENV_SESSION: () => ENV_SESSION, + fromEnv: () => fromEnv +}); +module.exports = __toCommonJS(src_exports); + +// src/fromEnv.ts +var import_client = __nccwpck_require__(5152); +var import_property_provider = __nccwpck_require__(1238); +var ENV_KEY = "AWS_ACCESS_KEY_ID"; +var ENV_SECRET = "AWS_SECRET_ACCESS_KEY"; +var ENV_SESSION = "AWS_SESSION_TOKEN"; +var ENV_EXPIRATION = "AWS_CREDENTIAL_EXPIRATION"; +var ENV_CREDENTIAL_SCOPE = "AWS_CREDENTIAL_SCOPE"; +var ENV_ACCOUNT_ID = "AWS_ACCOUNT_ID"; +var fromEnv = /* @__PURE__ */ __name((init) => async () => { + var _a; + (_a = init == null ? void 0 : init.logger) == null ? 
void 0 : _a.debug("@aws-sdk/credential-provider-env - fromEnv"); + const accessKeyId = process.env[ENV_KEY]; + const secretAccessKey = process.env[ENV_SECRET]; + const sessionToken = process.env[ENV_SESSION]; + const expiry = process.env[ENV_EXPIRATION]; + const credentialScope = process.env[ENV_CREDENTIAL_SCOPE]; + const accountId = process.env[ENV_ACCOUNT_ID]; + if (accessKeyId && secretAccessKey) { + const credentials = { + accessKeyId, + secretAccessKey, + ...sessionToken && { sessionToken }, + ...expiry && { expiration: new Date(expiry) }, + ...credentialScope && { credentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_ENV_VARS", "g"); + return credentials; + } + throw new import_property_provider.CredentialsProviderError("Unable to find environment variable credentials.", { logger: init == null ? void 0 : init.logger }); +}, "fromEnv"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 1509: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.checkUrl = void 0; +const property_provider_1 = __nccwpck_require__(1238); +const LOOPBACK_CIDR_IPv4 = "127.0.0.0/8"; +const LOOPBACK_CIDR_IPv6 = "::1/128"; +const ECS_CONTAINER_HOST = "169.254.170.2"; +const EKS_CONTAINER_HOST_IPv4 = "169.254.170.23"; +const EKS_CONTAINER_HOST_IPv6 = "[fd00:ec2::23]"; +const checkUrl = (url, logger) => { + if (url.protocol === "https:") { + return; + } + if (url.hostname === ECS_CONTAINER_HOST || + url.hostname === EKS_CONTAINER_HOST_IPv4 || + url.hostname === EKS_CONTAINER_HOST_IPv6) { + return; + } + if (url.hostname.includes("[")) { + if (url.hostname === "[::1]" || url.hostname === "[0000:0000:0000:0000:0000:0000:0000:0001]") { + return; + } + } + else { + if (url.hostname === "localhost") { + return; + } + const ipComponents = url.hostname.split("."); + const inRange = (component) => { + const num = parseInt(component, 10); + return 0 <= num && num <= 255; + }; + if (ipComponents[0] === "127" && + inRange(ipComponents[1]) && + inRange(ipComponents[2]) && + inRange(ipComponents[3]) && + ipComponents.length === 4) { + return; + } + } + throw new property_provider_1.CredentialsProviderError(`URL not accepted. 
It must either be HTTPS or match one of the following: + - loopback CIDR 127.0.0.0/8 or [::1/128] + - ECS container host 169.254.170.2 + - EKS container host 169.254.170.23 or [fd00:ec2::23]`, { logger }); +}; +exports.checkUrl = checkUrl; + + +/***/ }), + +/***/ 8712: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromHttp = void 0; +const tslib_1 = __nccwpck_require__(1860); +const client_1 = __nccwpck_require__(5152); +const node_http_handler_1 = __nccwpck_require__(1279); +const property_provider_1 = __nccwpck_require__(1238); +const promises_1 = tslib_1.__importDefault(__nccwpck_require__(1943)); +const checkUrl_1 = __nccwpck_require__(1509); +const requestHelpers_1 = __nccwpck_require__(8914); +const retry_wrapper_1 = __nccwpck_require__(1122); +const AWS_CONTAINER_CREDENTIALS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +const DEFAULT_LINK_LOCAL_HOST = "http://169.254.170.2"; +const AWS_CONTAINER_CREDENTIALS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE = "AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE"; +const AWS_CONTAINER_AUTHORIZATION_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +const fromHttp = (options = {}) => { + options.logger?.debug("@aws-sdk/credential-provider-http - fromHttp"); + let host; + const relative = options.awsContainerCredentialsRelativeUri ?? process.env[AWS_CONTAINER_CREDENTIALS_RELATIVE_URI]; + const full = options.awsContainerCredentialsFullUri ?? process.env[AWS_CONTAINER_CREDENTIALS_FULL_URI]; + const token = options.awsContainerAuthorizationToken ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN]; + const tokenFile = options.awsContainerAuthorizationTokenFile ?? process.env[AWS_CONTAINER_AUTHORIZATION_TOKEN_FILE]; + const warn = options.logger?.constructor?.name === "NoOpLogger" || !options.logger ? console.warn : options.logger.warn; + if (relative && full) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerCredentialsRelativeUri and awsContainerCredentialsFullUri."); + warn("awsContainerCredentialsFullUri will take precedence."); + } + if (token && tokenFile) { + warn("@aws-sdk/credential-provider-http: " + + "you have set both awsContainerAuthorizationToken and awsContainerAuthorizationTokenFile."); + warn("awsContainerAuthorizationToken will take precedence."); + } + if (full) { + host = full; + } + else if (relative) { + host = `${DEFAULT_LINK_LOCAL_HOST}${relative}`; + } + else { + throw new property_provider_1.CredentialsProviderError(`No HTTP credential provider host provided. +Set AWS_CONTAINER_CREDENTIALS_FULL_URI or AWS_CONTAINER_CREDENTIALS_RELATIVE_URI.`, { logger: options.logger }); + } + const url = new URL(host); + (0, checkUrl_1.checkUrl)(url, options.logger); + const requestHandler = new node_http_handler_1.NodeHttpHandler({ + requestTimeout: options.timeout ?? 1000, + connectionTimeout: options.timeout ?? 
1000, + }); + return (0, retry_wrapper_1.retryWrapper)(async () => { + const request = (0, requestHelpers_1.createGetRequest)(url); + if (token) { + request.headers.Authorization = token; + } + else if (tokenFile) { + request.headers.Authorization = (await promises_1.default.readFile(tokenFile)).toString(); + } + try { + const result = await requestHandler.handle(request); + return (0, requestHelpers_1.getCredentials)(result.response).then((creds) => (0, client_1.setCredentialFeature)(creds, "CREDENTIALS_HTTP", "z")); + } + catch (e) { + throw new property_provider_1.CredentialsProviderError(String(e), { logger: options.logger }); + } + }, options.maxRetries ?? 3, options.timeout ?? 1000); +}; +exports.fromHttp = fromHttp; + + +/***/ }), + +/***/ 8914: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCredentials = exports.createGetRequest = void 0; +const property_provider_1 = __nccwpck_require__(1238); +const protocol_http_1 = __nccwpck_require__(2356); +const smithy_client_1 = __nccwpck_require__(1411); +const util_stream_1 = __nccwpck_require__(4252); +function createGetRequest(url) { + return new protocol_http_1.HttpRequest({ + protocol: url.protocol, + hostname: url.hostname, + port: Number(url.port), + path: url.pathname, + query: Array.from(url.searchParams.entries()).reduce((acc, [k, v]) => { + acc[k] = v; + return acc; + }, {}), + fragment: url.hash, + }); +} +exports.createGetRequest = createGetRequest; +async function getCredentials(response, logger) { + const stream = (0, util_stream_1.sdkStreamMixin)(response.body); + const str = await stream.transformToString(); + if (response.statusCode === 200) { + const parsed = JSON.parse(str); + if (typeof parsed.AccessKeyId !== "string" || + typeof parsed.SecretAccessKey !== "string" || + typeof parsed.Token !== "string" || + typeof parsed.Expiration !== "string") { + throw new property_provider_1.CredentialsProviderError("HTTP credential provider response not of the required format, an object matching: " + + "{ AccessKeyId: string, SecretAccessKey: string, Token: string, Expiration: string(rfc3339) }", { logger }); + } + return { + accessKeyId: parsed.AccessKeyId, + secretAccessKey: parsed.SecretAccessKey, + sessionToken: parsed.Token, + expiration: (0, smithy_client_1.parseRfc3339DateTime)(parsed.Expiration), + }; + } + if (response.statusCode >= 400 && response.statusCode < 500) { + let parsedBody = {}; + try { + parsedBody = JSON.parse(str); + } + catch (e) { } + throw Object.assign(new property_provider_1.CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }), { + Code: parsedBody.Code, + Message: parsedBody.Message, + }); + } + throw new property_provider_1.CredentialsProviderError(`Server responded with status: ${response.statusCode}`, { logger }); +} +exports.getCredentials = getCredentials; + + +/***/ }), + +/***/ 1122: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.retryWrapper = void 0; +const retryWrapper = (toRetry, maxRetries, delayMs) => { + return async () => { + for (let i = 0; i < maxRetries; ++i) { + try { + return await toRetry(); + } + catch (e) { + await new Promise((resolve) => setTimeout(resolve, delayMs)); + } + } + return await toRetry(); + }; +}; +exports.retryWrapper = retryWrapper; + + +/***/ }), + +/***/ 8605: +/***/ ((__unused_webpack_module, exports, 
__nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromHttp = void 0; +var fromHttp_1 = __nccwpck_require__(8712); +Object.defineProperty(exports, "fromHttp", ({ enumerable: true, get: function () { return fromHttp_1.fromHttp; } })); + + +/***/ }), + +/***/ 5869: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromIni: () => fromIni +}); +module.exports = __toCommonJS(src_exports); + +// src/fromIni.ts + + +// src/resolveProfileData.ts + + +// src/resolveAssumeRoleCredentials.ts + + +var import_shared_ini_file_loader = __nccwpck_require__(4964); + +// src/resolveCredentialSource.ts +var import_client = __nccwpck_require__(5152); +var import_property_provider = __nccwpck_require__(1238); +var resolveCredentialSource = /* @__PURE__ */ __name((credentialSource, profileName, logger) => { + const sourceProvidersMap = { + EcsContainer: async (options) => { + const { fromHttp } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(8605))); + const { fromContainerMetadata } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(566))); + logger == null ? void 0 : logger.debug("@aws-sdk/credential-provider-ini - credential_source is EcsContainer"); + return async () => (0, import_property_provider.chain)(fromHttp(options ?? {}), fromContainerMetadata(options))().then(setNamedProvider); + }, + Ec2InstanceMetadata: async (options) => { + logger == null ? void 0 : logger.debug("@aws-sdk/credential-provider-ini - credential_source is Ec2InstanceMetadata"); + const { fromInstanceMetadata } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(566))); + return async () => fromInstanceMetadata(options)().then(setNamedProvider); + }, + Environment: async (options) => { + logger == null ? 
void 0 : logger.debug("@aws-sdk/credential-provider-ini - credential_source is Environment"); + const { fromEnv } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(5606))); + return async () => fromEnv(options)().then(setNamedProvider); + } + }; + if (credentialSource in sourceProvidersMap) { + return sourceProvidersMap[credentialSource]; + } else { + throw new import_property_provider.CredentialsProviderError( + `Unsupported credential source in profile ${profileName}. Got ${credentialSource}, expected EcsContainer or Ec2InstanceMetadata or Environment.`, + { logger } + ); + } +}, "resolveCredentialSource"); +var setNamedProvider = /* @__PURE__ */ __name((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_NAMED_PROVIDER", "p"), "setNamedProvider"); + +// src/resolveAssumeRoleCredentials.ts +var isAssumeRoleProfile = /* @__PURE__ */ __name((arg, { profile = "default", logger } = {}) => { + return Boolean(arg) && typeof arg === "object" && typeof arg.role_arn === "string" && ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1 && ["undefined", "string"].indexOf(typeof arg.external_id) > -1 && ["undefined", "string"].indexOf(typeof arg.mfa_serial) > -1 && (isAssumeRoleWithSourceProfile(arg, { profile, logger }) || isCredentialSourceProfile(arg, { profile, logger })); +}, "isAssumeRoleProfile"); +var isAssumeRoleWithSourceProfile = /* @__PURE__ */ __name((arg, { profile, logger }) => { + var _a; + const withSourceProfile = typeof arg.source_profile === "string" && typeof arg.credential_source === "undefined"; + if (withSourceProfile) { + (_a = logger == null ? void 0 : logger.debug) == null ? void 0 : _a.call(logger, ` ${profile} isAssumeRoleWithSourceProfile source_profile=${arg.source_profile}`); + } + return withSourceProfile; +}, "isAssumeRoleWithSourceProfile"); +var isCredentialSourceProfile = /* @__PURE__ */ __name((arg, { profile, logger }) => { + var _a; + const withProviderProfile = typeof arg.credential_source === "string" && typeof arg.source_profile === "undefined"; + if (withProviderProfile) { + (_a = logger == null ? void 0 : logger.debug) == null ? void 0 : _a.call(logger, ` ${profile} isCredentialSourceProfile credential_source=${arg.credential_source}`); + } + return withProviderProfile; +}, "isCredentialSourceProfile"); +var resolveAssumeRoleCredentials = /* @__PURE__ */ __name(async (profileName, profiles, options, visitedProfiles = {}) => { + var _a, _b, _c; + (_a = options.logger) == null ? void 0 : _a.debug("@aws-sdk/credential-provider-ini - resolveAssumeRoleCredentials (STS)"); + const profileData = profiles[profileName]; + const { source_profile, region } = profileData; + if (!options.roleAssumer) { + const { getDefaultRoleAssumer } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(1695))); + options.roleAssumer = getDefaultRoleAssumer( + { + ...options.clientConfig, + credentialProviderLogger: options.logger, + parentClientConfig: { + ...options == null ? void 0 : options.parentClientConfig, + region: region ?? ((_b = options == null ? void 0 : options.parentClientConfig) == null ? void 0 : _b.region) + } + }, + options.clientPlugins + ); + } + if (source_profile && source_profile in visitedProfiles) { + throw new import_property_provider.CredentialsProviderError( + `Detected a cycle attempting to resolve credentials for profile ${(0, import_shared_ini_file_loader.getProfileName)(options)}. 
Profiles visited: ` + Object.keys(visitedProfiles).join(", "), + { logger: options.logger } + ); + } + (_c = options.logger) == null ? void 0 : _c.debug( + `@aws-sdk/credential-provider-ini - finding credential resolver using ${source_profile ? `source_profile=[${source_profile}]` : `profile=[${profileName}]`}` + ); + const sourceCredsProvider = source_profile ? resolveProfileData( + source_profile, + profiles, + options, + { + ...visitedProfiles, + [source_profile]: true + }, + isCredentialSourceWithoutRoleArn(profiles[source_profile] ?? {}) + ) : (await resolveCredentialSource(profileData.credential_source, profileName, options.logger)(options))(); + if (isCredentialSourceWithoutRoleArn(profileData)) { + return sourceCredsProvider.then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o")); + } else { + const params = { + RoleArn: profileData.role_arn, + RoleSessionName: profileData.role_session_name || `aws-sdk-js-${Date.now()}`, + ExternalId: profileData.external_id, + DurationSeconds: parseInt(profileData.duration_seconds || "3600", 10) + }; + const { mfa_serial } = profileData; + if (mfa_serial) { + if (!options.mfaCodeProvider) { + throw new import_property_provider.CredentialsProviderError( + `Profile ${profileName} requires multi-factor authentication, but no MFA code callback was provided.`, + { logger: options.logger, tryNextLink: false } + ); + } + params.SerialNumber = mfa_serial; + params.TokenCode = await options.mfaCodeProvider(mfa_serial); + } + const sourceCreds = await sourceCredsProvider; + return options.roleAssumer(sourceCreds, params).then( + (creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SOURCE_PROFILE", "o") + ); + } +}, "resolveAssumeRoleCredentials"); +var isCredentialSourceWithoutRoleArn = /* @__PURE__ */ __name((section) => { + return !section.role_arn && !!section.credential_source; +}, "isCredentialSourceWithoutRoleArn"); + +// src/resolveProcessCredentials.ts + +var isProcessProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.credential_process === "string", "isProcessProfile"); +var resolveProcessCredentials = /* @__PURE__ */ __name(async (options, profile) => Promise.resolve().then(() => __toESM(__nccwpck_require__(5360))).then( + ({ fromProcess }) => fromProcess({ + ...options, + profile + })().then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_PROCESS", "v")) +), "resolveProcessCredentials"); + +// src/resolveSsoCredentials.ts + +var resolveSsoCredentials = /* @__PURE__ */ __name(async (profile, profileData, options = {}) => { + const { fromSSO } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(998))); + return fromSSO({ + profile, + logger: options.logger, + parentClientConfig: options.parentClientConfig, + clientConfig: options.clientConfig + })().then((creds) => { + if (profileData.sso_session) { + return (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SSO", "r"); + } else { + return (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_SSO_LEGACY", "t"); + } + }); +}, "resolveSsoCredentials"); +var isSsoProfile = /* @__PURE__ */ __name((arg) => arg && (typeof arg.sso_start_url === "string" || typeof arg.sso_account_id === "string" || typeof arg.sso_session === "string" || typeof arg.sso_region === "string" || typeof arg.sso_role_name === "string"), "isSsoProfile"); + +// src/resolveStaticCredentials.ts + +var isStaticCredsProfile = /* @__PURE__ */ 
__name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.aws_access_key_id === "string" && typeof arg.aws_secret_access_key === "string" && ["undefined", "string"].indexOf(typeof arg.aws_session_token) > -1 && ["undefined", "string"].indexOf(typeof arg.aws_account_id) > -1, "isStaticCredsProfile"); +var resolveStaticCredentials = /* @__PURE__ */ __name(async (profile, options) => { + var _a; + (_a = options == null ? void 0 : options.logger) == null ? void 0 : _a.debug("@aws-sdk/credential-provider-ini - resolveStaticCredentials"); + const credentials = { + accessKeyId: profile.aws_access_key_id, + secretAccessKey: profile.aws_secret_access_key, + sessionToken: profile.aws_session_token, + ...profile.aws_credential_scope && { credentialScope: profile.aws_credential_scope }, + ...profile.aws_account_id && { accountId: profile.aws_account_id } + }; + return (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_PROFILE", "n"); +}, "resolveStaticCredentials"); + +// src/resolveWebIdentityCredentials.ts + +var isWebIdentityProfile = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.web_identity_token_file === "string" && typeof arg.role_arn === "string" && ["undefined", "string"].indexOf(typeof arg.role_session_name) > -1, "isWebIdentityProfile"); +var resolveWebIdentityCredentials = /* @__PURE__ */ __name(async (profile, options) => Promise.resolve().then(() => __toESM(__nccwpck_require__(9956))).then( + ({ fromTokenFile }) => fromTokenFile({ + webIdentityTokenFile: profile.web_identity_token_file, + roleArn: profile.role_arn, + roleSessionName: profile.role_session_name, + roleAssumerWithWebIdentity: options.roleAssumerWithWebIdentity, + logger: options.logger, + parentClientConfig: options.parentClientConfig + })().then((creds) => (0, import_client.setCredentialFeature)(creds, "CREDENTIALS_PROFILE_STS_WEB_ID_TOKEN", "q")) +), "resolveWebIdentityCredentials"); + +// src/resolveProfileData.ts +var resolveProfileData = /* @__PURE__ */ __name(async (profileName, profiles, options, visitedProfiles = {}, isAssumeRoleRecursiveCall = false) => { + const data = profiles[profileName]; + if (Object.keys(visitedProfiles).length > 0 && isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isAssumeRoleRecursiveCall || isAssumeRoleProfile(data, { profile: profileName, logger: options.logger })) { + return resolveAssumeRoleCredentials(profileName, profiles, options, visitedProfiles); + } + if (isStaticCredsProfile(data)) { + return resolveStaticCredentials(data, options); + } + if (isWebIdentityProfile(data)) { + return resolveWebIdentityCredentials(data, options); + } + if (isProcessProfile(data)) { + return resolveProcessCredentials(options, profileName); + } + if (isSsoProfile(data)) { + return await resolveSsoCredentials(profileName, data, options); + } + throw new import_property_provider.CredentialsProviderError( + `Could not resolve credentials using profile: [${profileName}] in configuration/credentials file(s).`, + { logger: options.logger } + ); +}, "resolveProfileData"); + +// src/fromIni.ts +var fromIni = /* @__PURE__ */ __name((_init = {}) => async ({ callerClientConfig } = {}) => { + var _a; + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig + } + }; + (_a = init.logger) == null ? 
void 0 : _a.debug("@aws-sdk/credential-provider-ini - fromIni"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + return resolveProfileData( + (0, import_shared_ini_file_loader.getProfileName)({ + profile: _init.profile ?? (callerClientConfig == null ? void 0 : callerClientConfig.profile) + }), + profiles, + init + ); +}, "fromIni"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 5861: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + credentialsTreatedAsExpired: () => credentialsTreatedAsExpired, + credentialsWillNeedRefresh: () => credentialsWillNeedRefresh, + defaultProvider: () => defaultProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/defaultProvider.ts +var import_credential_provider_env = __nccwpck_require__(5606); + +var import_shared_ini_file_loader = __nccwpck_require__(4964); + +// src/remoteProvider.ts +var import_property_provider = __nccwpck_require__(1238); +var ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +var remoteProvider = /* @__PURE__ */ __name(async (init) => { + var _a, _b; + const { ENV_CMDS_FULL_URI, ENV_CMDS_RELATIVE_URI, fromContainerMetadata, fromInstanceMetadata } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(566))); + if (process.env[ENV_CMDS_RELATIVE_URI] || process.env[ENV_CMDS_FULL_URI]) { + (_a = init.logger) == null ? void 0 : _a.debug("@aws-sdk/credential-provider-node - remoteProvider::fromHttp/fromContainerMetadata"); + const { fromHttp } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(8605))); + return (0, import_property_provider.chain)(fromHttp(init), fromContainerMetadata(init)); + } + if (process.env[ENV_IMDS_DISABLED]) { + return async () => { + throw new import_property_provider.CredentialsProviderError("EC2 Instance Metadata Service access disabled", { logger: init.logger }); + }; + } + (_b = init.logger) == null ? 
void 0 : _b.debug("@aws-sdk/credential-provider-node - remoteProvider::fromInstanceMetadata"); + return fromInstanceMetadata(init); +}, "remoteProvider"); + +// src/defaultProvider.ts +var multipleCredentialSourceWarningEmitted = false; +var defaultProvider = /* @__PURE__ */ __name((init = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + async () => { + var _a, _b, _c, _d; + const profile = init.profile ?? process.env[import_shared_ini_file_loader.ENV_PROFILE]; + if (profile) { + const envStaticCredentialsAreSet = process.env[import_credential_provider_env.ENV_KEY] && process.env[import_credential_provider_env.ENV_SECRET]; + if (envStaticCredentialsAreSet) { + if (!multipleCredentialSourceWarningEmitted) { + const warnFn = ((_a = init.logger) == null ? void 0 : _a.warn) && ((_c = (_b = init.logger) == null ? void 0 : _b.constructor) == null ? void 0 : _c.name) !== "NoOpLogger" ? init.logger.warn : console.warn; + warnFn( + `@aws-sdk/credential-provider-node - defaultProvider::fromEnv WARNING: + Multiple credential sources detected: + Both AWS_PROFILE and the pair AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY static credentials are set. + This SDK will proceed with the AWS_PROFILE value. + + However, a future version may change this behavior to prefer the ENV static credentials. + Please ensure that your environment only sets either the AWS_PROFILE or the + AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY pair. +` + ); + multipleCredentialSourceWarningEmitted = true; + } + } + throw new import_property_provider.CredentialsProviderError("AWS_PROFILE is set, skipping fromEnv provider.", { + logger: init.logger, + tryNextLink: true + }); + } + (_d = init.logger) == null ? void 0 : _d.debug("@aws-sdk/credential-provider-node - defaultProvider::fromEnv"); + return (0, import_credential_provider_env.fromEnv)(init)(); + }, + async () => { + var _a; + (_a = init.logger) == null ? void 0 : _a.debug("@aws-sdk/credential-provider-node - defaultProvider::fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + throw new import_property_provider.CredentialsProviderError( + "Skipping SSO provider in default chain (inputs do not include SSO fields).", + { logger: init.logger } + ); + } + const { fromSSO } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(998))); + return fromSSO(init)(); + }, + async () => { + var _a; + (_a = init.logger) == null ? void 0 : _a.debug("@aws-sdk/credential-provider-node - defaultProvider::fromIni"); + const { fromIni } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(5869))); + return fromIni(init)(); + }, + async () => { + var _a; + (_a = init.logger) == null ? void 0 : _a.debug("@aws-sdk/credential-provider-node - defaultProvider::fromProcess"); + const { fromProcess } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(5360))); + return fromProcess(init)(); + }, + async () => { + var _a; + (_a = init.logger) == null ? void 0 : _a.debug("@aws-sdk/credential-provider-node - defaultProvider::fromTokenFile"); + const { fromTokenFile } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(9956))); + return fromTokenFile(init)(); + }, + async () => { + var _a; + (_a = init.logger) == null ? 
void 0 : _a.debug("@aws-sdk/credential-provider-node - defaultProvider::remoteProvider"); + return (await remoteProvider(init))(); + }, + async () => { + throw new import_property_provider.CredentialsProviderError("Could not load credentials from any providers", { + tryNextLink: false, + logger: init.logger + }); + } + ), + credentialsTreatedAsExpired, + credentialsWillNeedRefresh +), "defaultProvider"); +var credentialsWillNeedRefresh = /* @__PURE__ */ __name((credentials) => (credentials == null ? void 0 : credentials.expiration) !== void 0, "credentialsWillNeedRefresh"); +var credentialsTreatedAsExpired = /* @__PURE__ */ __name((credentials) => (credentials == null ? void 0 : credentials.expiration) !== void 0 && credentials.expiration.getTime() - Date.now() < 3e5, "credentialsTreatedAsExpired"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 5360: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromProcess: () => fromProcess +}); +module.exports = __toCommonJS(src_exports); + +// src/fromProcess.ts +var import_shared_ini_file_loader = __nccwpck_require__(4964); + +// src/resolveProcessCredentials.ts +var import_property_provider = __nccwpck_require__(1238); +var import_child_process = __nccwpck_require__(5317); +var import_util = __nccwpck_require__(9023); + +// src/getValidatedProcessCredentials.ts +var import_client = __nccwpck_require__(5152); +var getValidatedProcessCredentials = /* @__PURE__ */ __name((profileName, data, profiles) => { + var _a; + if (data.Version !== 1) { + throw Error(`Profile ${profileName} credential_process did not return Version 1.`); + } + if (data.AccessKeyId === void 0 || data.SecretAccessKey === void 0) { + throw Error(`Profile ${profileName} credential_process returned invalid credentials.`); + } + if (data.Expiration) { + const currentTime = /* @__PURE__ */ new Date(); + const expireTime = new Date(data.Expiration); + if (expireTime < currentTime) { + throw Error(`Profile ${profileName} credential_process returned expired credentials.`); + } + } + let accountId = data.AccountId; + if (!accountId && ((_a = profiles == null ? void 0 : profiles[profileName]) == null ? 
void 0 : _a.aws_account_id)) { + accountId = profiles[profileName].aws_account_id; + } + const credentials = { + accessKeyId: data.AccessKeyId, + secretAccessKey: data.SecretAccessKey, + ...data.SessionToken && { sessionToken: data.SessionToken }, + ...data.Expiration && { expiration: new Date(data.Expiration) }, + ...data.CredentialScope && { credentialScope: data.CredentialScope }, + ...accountId && { accountId } + }; + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_PROCESS", "w"); + return credentials; +}, "getValidatedProcessCredentials"); + +// src/resolveProcessCredentials.ts +var resolveProcessCredentials = /* @__PURE__ */ __name(async (profileName, profiles, logger) => { + const profile = profiles[profileName]; + if (profiles[profileName]) { + const credentialProcess = profile["credential_process"]; + if (credentialProcess !== void 0) { + const execPromise = (0, import_util.promisify)(import_child_process.exec); + try { + const { stdout } = await execPromise(credentialProcess); + let data; + try { + data = JSON.parse(stdout.trim()); + } catch { + throw Error(`Profile ${profileName} credential_process returned invalid JSON.`); + } + return getValidatedProcessCredentials(profileName, data, profiles); + } catch (error) { + throw new import_property_provider.CredentialsProviderError(error.message, { logger }); + } + } else { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} did not contain credential_process.`, { logger }); + } + } else { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} could not be found in shared credentials file.`, { + logger + }); + } +}, "resolveProcessCredentials"); + +// src/fromProcess.ts +var fromProcess = /* @__PURE__ */ __name((init = {}) => async ({ callerClientConfig } = {}) => { + var _a; + (_a = init.logger) == null ? void 0 : _a.debug("@aws-sdk/credential-provider-process - fromProcess"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + return resolveProcessCredentials( + (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? (callerClientConfig == null ? 
void 0 : callerClientConfig.profile) + }), + profiles, + init.logger + ); +}, "fromProcess"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 998: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __esm = (fn, res) => function __init() { + return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res; +}; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/loadSso.ts +var loadSso_exports = {}; +__export(loadSso_exports, { + GetRoleCredentialsCommand: () => import_client_sso.GetRoleCredentialsCommand, + SSOClient: () => import_client_sso.SSOClient +}); +var import_client_sso; +var init_loadSso = __esm({ + "src/loadSso.ts"() { + "use strict"; + import_client_sso = __nccwpck_require__(2054); + } +}); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromSSO: () => fromSSO, + isSsoProfile: () => isSsoProfile, + validateSsoProfile: () => validateSsoProfile +}); +module.exports = __toCommonJS(src_exports); + +// src/fromSSO.ts + + + +// src/isSsoProfile.ts +var isSsoProfile = /* @__PURE__ */ __name((arg) => arg && (typeof arg.sso_start_url === "string" || typeof arg.sso_account_id === "string" || typeof arg.sso_session === "string" || typeof arg.sso_region === "string" || typeof arg.sso_role_name === "string"), "isSsoProfile"); + +// src/resolveSSOCredentials.ts +var import_client = __nccwpck_require__(5152); +var import_token_providers = __nccwpck_require__(5433); +var import_property_provider = __nccwpck_require__(1238); +var import_shared_ini_file_loader = __nccwpck_require__(4964); +var SHOULD_FAIL_CREDENTIAL_CHAIN = false; +var resolveSSOCredentials = /* @__PURE__ */ __name(async ({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig, + parentClientConfig, + profile, + logger +}) => { + let token; + const refreshMessage = `To refresh this SSO session run aws sso login with the corresponding profile.`; + if (ssoSession) { + try { + const _token = await (0, import_token_providers.fromSso)({ profile })(); + token = { + accessToken: _token.token, + expiresAt: new Date(_token.expiration).toISOString() + }; + } catch (e) { + throw new import_property_provider.CredentialsProviderError(e.message, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + } else { + try { + token = await (0, import_shared_ini_file_loader.getSSOTokenFromFile)(ssoStartUrl); + } catch (e) { + throw new import_property_provider.CredentialsProviderError(`The SSO session associated with this profile is invalid. 
${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + } + if (new Date(token.expiresAt).getTime() - Date.now() <= 0) { + throw new import_property_provider.CredentialsProviderError(`The SSO session associated with this profile has expired. ${refreshMessage}`, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const { accessToken } = token; + const { SSOClient: SSOClient2, GetRoleCredentialsCommand: GetRoleCredentialsCommand2 } = await Promise.resolve().then(() => (init_loadSso(), loadSso_exports)); + const sso = ssoClient || new SSOClient2( + Object.assign({}, clientConfig ?? {}, { + logger: (clientConfig == null ? void 0 : clientConfig.logger) ?? (parentClientConfig == null ? void 0 : parentClientConfig.logger), + region: (clientConfig == null ? void 0 : clientConfig.region) ?? ssoRegion + }) + ); + let ssoResp; + try { + ssoResp = await sso.send( + new GetRoleCredentialsCommand2({ + accountId: ssoAccountId, + roleName: ssoRoleName, + accessToken + }) + ); + } catch (e) { + throw new import_property_provider.CredentialsProviderError(e, { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const { + roleCredentials: { accessKeyId, secretAccessKey, sessionToken, expiration, credentialScope, accountId } = {} + } = ssoResp; + if (!accessKeyId || !secretAccessKey || !sessionToken || !expiration) { + throw new import_property_provider.CredentialsProviderError("SSO returns an invalid temporary credential.", { + tryNextLink: SHOULD_FAIL_CREDENTIAL_CHAIN, + logger + }); + } + const credentials = { + accessKeyId, + secretAccessKey, + sessionToken, + expiration: new Date(expiration), + ...credentialScope && { credentialScope }, + ...accountId && { accountId } + }; + if (ssoSession) { + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_SSO", "s"); + } else { + (0, import_client.setCredentialFeature)(credentials, "CREDENTIALS_SSO_LEGACY", "u"); + } + return credentials; +}, "resolveSSOCredentials"); + +// src/validateSsoProfile.ts + +var validateSsoProfile = /* @__PURE__ */ __name((profile, logger) => { + const { sso_start_url, sso_account_id, sso_region, sso_role_name } = profile; + if (!sso_start_url || !sso_account_id || !sso_region || !sso_role_name) { + throw new import_property_provider.CredentialsProviderError( + `Profile is configured with invalid SSO credentials. Required parameters "sso_account_id", "sso_region", "sso_role_name", "sso_start_url". Got ${Object.keys(profile).join( + ", " + )} +Reference: https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-sso.html`, + { tryNextLink: false, logger } + ); + } + return profile; +}, "validateSsoProfile"); + +// src/fromSSO.ts +var fromSSO = /* @__PURE__ */ __name((init = {}) => async ({ callerClientConfig } = {}) => { + var _a; + (_a = init.logger) == null ? void 0 : _a.debug("@aws-sdk/credential-provider-sso - fromSSO"); + const { ssoStartUrl, ssoAccountId, ssoRegion, ssoRoleName, ssoSession } = init; + const { ssoClient } = init; + const profileName = (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? (callerClientConfig == null ? 
void 0 : callerClientConfig.profile) + }); + if (!ssoStartUrl && !ssoAccountId && !ssoRegion && !ssoRoleName && !ssoSession) { + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + const profile = profiles[profileName]; + if (!profile) { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} was not found.`, { logger: init.logger }); + } + if (!isSsoProfile(profile)) { + throw new import_property_provider.CredentialsProviderError(`Profile ${profileName} is not configured with SSO credentials.`, { + logger: init.logger + }); + } + if (profile == null ? void 0 : profile.sso_session) { + const ssoSessions = await (0, import_shared_ini_file_loader.loadSsoSessionData)(init); + const session = ssoSessions[profile.sso_session]; + const conflictMsg = ` configurations in profile ${profileName} and sso-session ${profile.sso_session}`; + if (ssoRegion && ssoRegion !== session.sso_region) { + throw new import_property_provider.CredentialsProviderError(`Conflicting SSO region` + conflictMsg, { + tryNextLink: false, + logger: init.logger + }); + } + if (ssoStartUrl && ssoStartUrl !== session.sso_start_url) { + throw new import_property_provider.CredentialsProviderError(`Conflicting SSO start_url` + conflictMsg, { + tryNextLink: false, + logger: init.logger + }); + } + profile.sso_region = session.sso_region; + profile.sso_start_url = session.sso_start_url; + } + const { sso_start_url, sso_account_id, sso_region, sso_role_name, sso_session } = validateSsoProfile( + profile, + init.logger + ); + return resolveSSOCredentials({ + ssoStartUrl: sso_start_url, + ssoSession: sso_session, + ssoAccountId: sso_account_id, + ssoRegion: sso_region, + ssoRoleName: sso_role_name, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName + }); + } else if (!ssoStartUrl || !ssoAccountId || !ssoRegion || !ssoRoleName) { + throw new import_property_provider.CredentialsProviderError( + 'Incomplete configuration. The fromSSO() argument hash must include "ssoStartUrl", "ssoAccountId", "ssoRegion", "ssoRoleName"', + { tryNextLink: false, logger: init.logger } + ); + } else { + return resolveSSOCredentials({ + ssoStartUrl, + ssoSession, + ssoAccountId, + ssoRegion, + ssoRoleName, + ssoClient, + clientConfig: init.clientConfig, + parentClientConfig: init.parentClientConfig, + profile: profileName + }); + } +}, "fromSSO"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 8079: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromTokenFile = void 0; +const client_1 = __nccwpck_require__(5152); +const property_provider_1 = __nccwpck_require__(1238); +const fs_1 = __nccwpck_require__(9896); +const fromWebToken_1 = __nccwpck_require__(4453); +const ENV_TOKEN_FILE = "AWS_WEB_IDENTITY_TOKEN_FILE"; +const ENV_ROLE_ARN = "AWS_ROLE_ARN"; +const ENV_ROLE_SESSION_NAME = "AWS_ROLE_SESSION_NAME"; +const fromTokenFile = (init = {}) => async () => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromTokenFile"); + const webIdentityTokenFile = init?.webIdentityTokenFile ?? process.env[ENV_TOKEN_FILE]; + const roleArn = init?.roleArn ?? process.env[ENV_ROLE_ARN]; + const roleSessionName = init?.roleSessionName ?? 
process.env[ENV_ROLE_SESSION_NAME]; + if (!webIdentityTokenFile || !roleArn) { + throw new property_provider_1.CredentialsProviderError("Web identity configuration not specified", { + logger: init.logger, + }); + } + const credentials = await (0, fromWebToken_1.fromWebToken)({ + ...init, + webIdentityToken: (0, fs_1.readFileSync)(webIdentityTokenFile, { encoding: "ascii" }), + roleArn, + roleSessionName, + })(); + if (webIdentityTokenFile === process.env[ENV_TOKEN_FILE]) { + (0, client_1.setCredentialFeature)(credentials, "CREDENTIALS_ENV_VARS_STS_WEB_ID_TOKEN", "h"); + } + return credentials; +}; +exports.fromTokenFile = fromTokenFile; + + +/***/ }), + +/***/ 4453: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromWebToken = void 0; +const fromWebToken = (init) => async (awsIdentityProperties) => { + init.logger?.debug("@aws-sdk/credential-provider-web-identity - fromWebToken"); + const { roleArn, roleSessionName, webIdentityToken, providerId, policyArns, policy, durationSeconds } = init; + let { roleAssumerWithWebIdentity } = init; + if (!roleAssumerWithWebIdentity) { + const { getDefaultRoleAssumerWithWebIdentity } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(1695))); + roleAssumerWithWebIdentity = getDefaultRoleAssumerWithWebIdentity({ + ...init.clientConfig, + credentialProviderLogger: init.logger, + parentClientConfig: { + ...awsIdentityProperties?.callerClientConfig, + ...init.parentClientConfig, + }, + }, init.clientPlugins); + } + return roleAssumerWithWebIdentity({ + RoleArn: roleArn, + RoleSessionName: roleSessionName ?? 
`aws-sdk-js-session-${Date.now()}`, + WebIdentityToken: webIdentityToken, + ProviderId: providerId, + PolicyArns: policyArns, + Policy: policy, + DurationSeconds: durationSeconds, + }); +}; +exports.fromWebToken = fromWebToken; + + +/***/ }), + +/***/ 9956: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, __nccwpck_require__(8079), module.exports); +__reExport(src_exports, __nccwpck_require__(4453), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 2590: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + getHostHeaderPlugin: () => getHostHeaderPlugin, + hostHeaderMiddleware: () => hostHeaderMiddleware, + hostHeaderMiddlewareOptions: () => hostHeaderMiddlewareOptions, + resolveHostHeaderConfig: () => resolveHostHeaderConfig +}); +module.exports = __toCommonJS(src_exports); +var import_protocol_http = __nccwpck_require__(2356); +function resolveHostHeaderConfig(input) { + return input; +} +__name(resolveHostHeaderConfig, "resolveHostHeaderConfig"); +var hostHeaderMiddleware = /* @__PURE__ */ __name((options) => (next) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) + return next(args); + const { request } = args; + const { handlerProtocol = "" } = options.requestHandler.metadata || {}; + if (handlerProtocol.indexOf("h2") >= 0 && !request.headers[":authority"]) { + delete request.headers["host"]; + request.headers[":authority"] = request.hostname + (request.port ? 
":" + request.port : ""); + } else if (!request.headers["host"]) { + let host = request.hostname; + if (request.port != null) + host += `:${request.port}`; + request.headers["host"] = host; + } + return next(args); +}, "hostHeaderMiddleware"); +var hostHeaderMiddlewareOptions = { + name: "hostHeaderMiddleware", + step: "build", + priority: "low", + tags: ["HOST"], + override: true +}; +var getHostHeaderPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.add(hostHeaderMiddleware(options), hostHeaderMiddlewareOptions); + } +}), "getHostHeaderPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 5242: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + getLoggerPlugin: () => getLoggerPlugin, + loggerMiddleware: () => loggerMiddleware, + loggerMiddlewareOptions: () => loggerMiddlewareOptions +}); +module.exports = __toCommonJS(src_exports); + +// src/loggerMiddleware.ts +var loggerMiddleware = /* @__PURE__ */ __name(() => (next, context) => async (args) => { + var _a, _b; + try { + const response = await next(args); + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog, overrideOutputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + const outputFilterSensitiveLog = overrideOutputFilterSensitiveLog ?? context.outputFilterSensitiveLog; + const { $metadata, ...outputWithoutMetadata } = response.output; + (_a = logger == null ? void 0 : logger.info) == null ? void 0 : _a.call(logger, { + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + output: outputFilterSensitiveLog(outputWithoutMetadata), + metadata: $metadata + }); + return response; + } catch (error) { + const { clientName, commandName, logger, dynamoDbDocumentClientOptions = {} } = context; + const { overrideInputFilterSensitiveLog } = dynamoDbDocumentClientOptions; + const inputFilterSensitiveLog = overrideInputFilterSensitiveLog ?? context.inputFilterSensitiveLog; + (_b = logger == null ? void 0 : logger.error) == null ? 
void 0 : _b.call(logger, { + clientName, + commandName, + input: inputFilterSensitiveLog(args.input), + error, + metadata: error.$metadata + }); + throw error; + } +}, "loggerMiddleware"); +var loggerMiddlewareOptions = { + name: "loggerMiddleware", + tags: ["LOGGER"], + step: "initialize", + override: true +}; +var getLoggerPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.add(loggerMiddleware(), loggerMiddlewareOptions); + } +}), "getLoggerPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 1568: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + addRecursionDetectionMiddlewareOptions: () => addRecursionDetectionMiddlewareOptions, + getRecursionDetectionPlugin: () => getRecursionDetectionPlugin, + recursionDetectionMiddleware: () => recursionDetectionMiddleware +}); +module.exports = __toCommonJS(src_exports); +var import_protocol_http = __nccwpck_require__(2356); +var TRACE_ID_HEADER_NAME = "X-Amzn-Trace-Id"; +var ENV_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"; +var ENV_TRACE_ID = "_X_AMZN_TRACE_ID"; +var recursionDetectionMiddleware = /* @__PURE__ */ __name((options) => (next) => async (args) => { + const { request } = args; + if (!import_protocol_http.HttpRequest.isInstance(request) || options.runtime !== "node" || request.headers.hasOwnProperty(TRACE_ID_HEADER_NAME)) { + return next(args); + } + const functionName = process.env[ENV_LAMBDA_FUNCTION_NAME]; + const traceId = process.env[ENV_TRACE_ID]; + const nonEmptyString = /* @__PURE__ */ __name((str) => typeof str === "string" && str.length > 0, "nonEmptyString"); + if (nonEmptyString(functionName) && nonEmptyString(traceId)) { + request.headers[TRACE_ID_HEADER_NAME] = traceId; + } + return next({ + ...args, + request + }); +}, "recursionDetectionMiddleware"); +var addRecursionDetectionMiddlewareOptions = { + step: "build", + tags: ["RECURSION_DETECTION"], + name: "recursionDetectionMiddleware", + override: true, + priority: "low" +}; +var getRecursionDetectionPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.add(recursionDetectionMiddleware(options), addRecursionDetectionMiddlewareOptions); + } +}), "getRecursionDetectionPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 2959: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = 
Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_UA_APP_ID: () => DEFAULT_UA_APP_ID, + getUserAgentMiddlewareOptions: () => getUserAgentMiddlewareOptions, + getUserAgentPlugin: () => getUserAgentPlugin, + resolveUserAgentConfig: () => resolveUserAgentConfig, + userAgentMiddleware: () => userAgentMiddleware +}); +module.exports = __toCommonJS(src_exports); + +// src/configurations.ts +var import_core = __nccwpck_require__(402); +var DEFAULT_UA_APP_ID = void 0; +function isValidUserAgentAppId(appId) { + if (appId === void 0) { + return true; + } + return typeof appId === "string" && appId.length <= 50; +} +__name(isValidUserAgentAppId, "isValidUserAgentAppId"); +function resolveUserAgentConfig(input) { + const normalizedAppIdProvider = (0, import_core.normalizeProvider)(input.userAgentAppId ?? DEFAULT_UA_APP_ID); + return { + ...input, + customUserAgent: typeof input.customUserAgent === "string" ? [[input.customUserAgent]] : input.customUserAgent, + userAgentAppId: async () => { + var _a, _b; + const appId = await normalizedAppIdProvider(); + if (!isValidUserAgentAppId(appId)) { + const logger = ((_b = (_a = input.logger) == null ? void 0 : _a.constructor) == null ? void 0 : _b.name) === "NoOpLogger" || !input.logger ? console : input.logger; + if (typeof appId !== "string") { + logger == null ? void 0 : logger.warn("userAgentAppId must be a string or undefined."); + } else if (appId.length > 50) { + logger == null ? void 0 : logger.warn("The provided userAgentAppId exceeds the maximum length of 50 characters."); + } + } + return appId; + } + }; +} +__name(resolveUserAgentConfig, "resolveUserAgentConfig"); + +// src/user-agent-middleware.ts +var import_util_endpoints = __nccwpck_require__(3068); +var import_protocol_http = __nccwpck_require__(2356); + +// src/check-features.ts +var import_core2 = __nccwpck_require__(8704); +var ACCOUNT_ID_ENDPOINT_REGEX = /\d{12}\.ddb/; +async function checkFeatures(context, config, args) { + var _a, _b, _c, _d, _e, _f, _g; + const request = args.request; + if (((_a = request == null ? void 0 : request.headers) == null ? void 0 : _a["smithy-protocol"]) === "rpc-v2-cbor") { + (0, import_core2.setFeature)(context, "PROTOCOL_RPC_V2_CBOR", "M"); + } + if (typeof config.retryStrategy === "function") { + const retryStrategy = await config.retryStrategy(); + if (typeof retryStrategy.acquireInitialRetryToken === "function") { + if ((_c = (_b = retryStrategy.constructor) == null ? void 0 : _b.name) == null ? 
void 0 : _c.includes("Adaptive")) { + (0, import_core2.setFeature)(context, "RETRY_MODE_ADAPTIVE", "F"); + } else { + (0, import_core2.setFeature)(context, "RETRY_MODE_STANDARD", "E"); + } + } else { + (0, import_core2.setFeature)(context, "RETRY_MODE_LEGACY", "D"); + } + } + if (typeof config.accountIdEndpointMode === "function") { + const endpointV2 = context.endpointV2; + if (String((_d = endpointV2 == null ? void 0 : endpointV2.url) == null ? void 0 : _d.hostname).match(ACCOUNT_ID_ENDPOINT_REGEX)) { + (0, import_core2.setFeature)(context, "ACCOUNT_ID_ENDPOINT", "O"); + } + switch (await ((_e = config.accountIdEndpointMode) == null ? void 0 : _e.call(config))) { + case "disabled": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_DISABLED", "Q"); + break; + case "preferred": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_PREFERRED", "P"); + break; + case "required": + (0, import_core2.setFeature)(context, "ACCOUNT_ID_MODE_REQUIRED", "R"); + break; + } + } + const identity = (_g = (_f = context.__smithy_context) == null ? void 0 : _f.selectedHttpAuthScheme) == null ? void 0 : _g.identity; + if (identity == null ? void 0 : identity.$source) { + const credentials = identity; + if (credentials.accountId) { + (0, import_core2.setFeature)(context, "RESOLVED_ACCOUNT_ID", "T"); + } + for (const [key, value] of Object.entries(credentials.$source ?? {})) { + (0, import_core2.setFeature)(context, key, value); + } + } +} +__name(checkFeatures, "checkFeatures"); + +// src/constants.ts +var USER_AGENT = "user-agent"; +var X_AMZ_USER_AGENT = "x-amz-user-agent"; +var SPACE = " "; +var UA_NAME_SEPARATOR = "/"; +var UA_NAME_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w]/g; +var UA_VALUE_ESCAPE_REGEX = /[^\!\$\%\&\'\*\+\-\.\^\_\`\|\~\d\w\#]/g; +var UA_ESCAPE_CHAR = "-"; + +// src/encode-features.ts +var BYTE_LIMIT = 1024; +function encodeFeatures(features) { + let buffer = ""; + for (const key in features) { + const val = features[key]; + if (buffer.length + val.length + 1 <= BYTE_LIMIT) { + if (buffer.length) { + buffer += "," + val; + } else { + buffer += val; + } + continue; + } + break; + } + return buffer; +} +__name(encodeFeatures, "encodeFeatures"); + +// src/user-agent-middleware.ts +var userAgentMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + var _a, _b, _c, _d; + const { request } = args; + if (!import_protocol_http.HttpRequest.isInstance(request)) { + return next(args); + } + const { headers } = request; + const userAgent = ((_a = context == null ? void 0 : context.userAgent) == null ? void 0 : _a.map(escapeUserAgent)) || []; + const defaultUserAgent = (await options.defaultUserAgentProvider()).map(escapeUserAgent); + await checkFeatures(context, options, args); + const awsContext = context; + defaultUserAgent.push( + `m/${encodeFeatures( + Object.assign({}, (_b = context.__smithy_context) == null ? void 0 : _b.features, (_c = awsContext.__aws_sdk_context) == null ? void 0 : _c.features) + )}` + ); + const customUserAgent = ((_d = options == null ? void 0 : options.customUserAgent) == null ? void 0 : _d.map(escapeUserAgent)) || []; + const appId = await options.userAgentAppId(); + if (appId) { + defaultUserAgent.push(escapeUserAgent([`app/${appId}`])); + } + const prefix = (0, import_util_endpoints.getUserAgentPrefix)(); + const sdkUserAgentValue = (prefix ? 
[prefix] : []).concat([...defaultUserAgent, ...userAgent, ...customUserAgent]).join(SPACE); + const normalUAValue = [ + ...defaultUserAgent.filter((section) => section.startsWith("aws-sdk-")), + ...customUserAgent + ].join(SPACE); + if (options.runtime !== "browser") { + if (normalUAValue) { + headers[X_AMZ_USER_AGENT] = headers[X_AMZ_USER_AGENT] ? `${headers[USER_AGENT]} ${normalUAValue}` : normalUAValue; + } + headers[USER_AGENT] = sdkUserAgentValue; + } else { + headers[X_AMZ_USER_AGENT] = sdkUserAgentValue; + } + return next({ + ...args, + request + }); +}, "userAgentMiddleware"); +var escapeUserAgent = /* @__PURE__ */ __name((userAgentPair) => { + var _a; + const name = userAgentPair[0].split(UA_NAME_SEPARATOR).map((part) => part.replace(UA_NAME_ESCAPE_REGEX, UA_ESCAPE_CHAR)).join(UA_NAME_SEPARATOR); + const version = (_a = userAgentPair[1]) == null ? void 0 : _a.replace(UA_VALUE_ESCAPE_REGEX, UA_ESCAPE_CHAR); + const prefixSeparatorIndex = name.indexOf(UA_NAME_SEPARATOR); + const prefix = name.substring(0, prefixSeparatorIndex); + let uaName = name.substring(prefixSeparatorIndex + 1); + if (prefix === "api") { + uaName = uaName.toLowerCase(); + } + return [prefix, uaName, version].filter((item) => item && item.length > 0).reduce((acc, item, index) => { + switch (index) { + case 0: + return item; + case 1: + return `${acc}/${item}`; + default: + return `${acc}#${item}`; + } + }, ""); +}, "escapeUserAgent"); +var getUserAgentMiddlewareOptions = { + name: "getUserAgentMiddleware", + step: "build", + priority: "low", + tags: ["SET_USER_AGENT", "USER_AGENT"], + override: true +}; +var getUserAgentPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: (clientStack) => { + clientStack.add(userAgentMiddleware(config), getUserAgentMiddlewareOptions); + } +}), "getUserAgentPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 6463: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + NODE_REGION_CONFIG_FILE_OPTIONS: () => NODE_REGION_CONFIG_FILE_OPTIONS, + NODE_REGION_CONFIG_OPTIONS: () => NODE_REGION_CONFIG_OPTIONS, + REGION_ENV_NAME: () => REGION_ENV_NAME, + REGION_INI_NAME: () => REGION_INI_NAME, + getAwsRegionExtensionConfiguration: () => getAwsRegionExtensionConfiguration, + resolveAwsRegionExtensionConfiguration: () => resolveAwsRegionExtensionConfiguration, + resolveRegionConfig: () => resolveRegionConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/extensions/index.ts +var getAwsRegionExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + let runtimeConfigRegion = /* @__PURE__ */ 
__name(async () => { + if (runtimeConfig.region === void 0) { + throw new Error("Region is missing from runtimeConfig"); + } + const region = runtimeConfig.region; + if (typeof region === "string") { + return region; + } + return region(); + }, "runtimeConfigRegion"); + return { + setRegion(region) { + runtimeConfigRegion = region; + }, + region() { + return runtimeConfigRegion; + } + }; +}, "getAwsRegionExtensionConfiguration"); +var resolveAwsRegionExtensionConfiguration = /* @__PURE__ */ __name((awsRegionExtensionConfiguration) => { + return { + region: awsRegionExtensionConfiguration.region() + }; +}, "resolveAwsRegionExtensionConfiguration"); + +// src/regionConfig/config.ts +var REGION_ENV_NAME = "AWS_REGION"; +var REGION_INI_NAME = "region"; +var NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + } +}; +var NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials" +}; + +// src/regionConfig/isFipsRegion.ts +var isFipsRegion = /* @__PURE__ */ __name((region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")), "isFipsRegion"); + +// src/regionConfig/getRealRegion.ts +var getRealRegion = /* @__PURE__ */ __name((region) => isFipsRegion(region) ? ["fips-aws-global", "aws-fips"].includes(region) ? "us-east-1" : region.replace(/fips-(dkr-|prod-)?|-fips/, "") : region, "getRealRegion"); + +// src/regionConfig/resolveRegionConfig.ts +var resolveRegionConfig = /* @__PURE__ */ __name((input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return { + ...input, + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + } + }; +}, "resolveRegionConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 5433: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? 
__create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromSso: () => fromSso, + fromStatic: () => fromStatic, + nodeProvider: () => nodeProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/fromSso.ts + + + +// src/constants.ts +var EXPIRE_WINDOW_MS = 5 * 60 * 1e3; +var REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`; + +// src/getSsoOidcClient.ts +var getSsoOidcClient = /* @__PURE__ */ __name(async (ssoRegion, init = {}) => { + var _a, _b, _c; + const { SSOOIDCClient } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(9260))); + const ssoOidcClient = new SSOOIDCClient( + Object.assign({}, init.clientConfig ?? {}, { + region: ssoRegion ?? ((_a = init.clientConfig) == null ? void 0 : _a.region), + logger: ((_b = init.clientConfig) == null ? void 0 : _b.logger) ?? ((_c = init.parentClientConfig) == null ? void 0 : _c.logger) + }) + ); + return ssoOidcClient; +}, "getSsoOidcClient"); + +// src/getNewSsoOidcToken.ts +var getNewSsoOidcToken = /* @__PURE__ */ __name(async (ssoToken, ssoRegion, init = {}) => { + const { CreateTokenCommand } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(9260))); + const ssoOidcClient = await getSsoOidcClient(ssoRegion, init); + return ssoOidcClient.send( + new CreateTokenCommand({ + clientId: ssoToken.clientId, + clientSecret: ssoToken.clientSecret, + refreshToken: ssoToken.refreshToken, + grantType: "refresh_token" + }) + ); +}, "getNewSsoOidcToken"); + +// src/validateTokenExpiry.ts +var import_property_provider = __nccwpck_require__(1238); +var validateTokenExpiry = /* @__PURE__ */ __name((token) => { + if (token.expiration && token.expiration.getTime() < Date.now()) { + throw new import_property_provider.TokenProviderError(`Token is expired. ${REFRESH_MESSAGE}`, false); + } +}, "validateTokenExpiry"); + +// src/validateTokenKey.ts + +var validateTokenKey = /* @__PURE__ */ __name((key, value, forRefresh = false) => { + if (typeof value === "undefined") { + throw new import_property_provider.TokenProviderError( + `Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. 
${REFRESH_MESSAGE}`, + false + ); + } +}, "validateTokenKey"); + +// src/writeSSOTokenToFile.ts +var import_shared_ini_file_loader = __nccwpck_require__(4964); +var import_fs = __nccwpck_require__(9896); +var { writeFile } = import_fs.promises; +var writeSSOTokenToFile = /* @__PURE__ */ __name((id, ssoToken) => { + const tokenFilepath = (0, import_shared_ini_file_loader.getSSOTokenFilepath)(id); + const tokenString = JSON.stringify(ssoToken, null, 2); + return writeFile(tokenFilepath, tokenString); +}, "writeSSOTokenToFile"); + +// src/fromSso.ts +var lastRefreshAttemptTime = /* @__PURE__ */ new Date(0); +var fromSso = /* @__PURE__ */ __name((_init = {}) => async ({ callerClientConfig } = {}) => { + var _a; + const init = { + ..._init, + parentClientConfig: { + ...callerClientConfig, + ..._init.parentClientConfig + } + }; + (_a = init.logger) == null ? void 0 : _a.debug("@aws-sdk/token-providers - fromSso"); + const profiles = await (0, import_shared_ini_file_loader.parseKnownFiles)(init); + const profileName = (0, import_shared_ini_file_loader.getProfileName)({ + profile: init.profile ?? (callerClientConfig == null ? void 0 : callerClientConfig.profile) + }); + const profile = profiles[profileName]; + if (!profile) { + throw new import_property_provider.TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false); + } else if (!profile["sso_session"]) { + throw new import_property_provider.TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`); + } + const ssoSessionName = profile["sso_session"]; + const ssoSessions = await (0, import_shared_ini_file_loader.loadSsoSessionData)(init); + const ssoSession = ssoSessions[ssoSessionName]; + if (!ssoSession) { + throw new import_property_provider.TokenProviderError( + `Sso session '${ssoSessionName}' could not be found in shared credentials file.`, + false + ); + } + for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) { + if (!ssoSession[ssoSessionRequiredKey]) { + throw new import_property_provider.TokenProviderError( + `Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, + false + ); + } + } + const ssoStartUrl = ssoSession["sso_start_url"]; + const ssoRegion = ssoSession["sso_region"]; + let ssoToken; + try { + ssoToken = await (0, import_shared_ini_file_loader.getSSOTokenFromFile)(ssoSessionName); + } catch (e) { + throw new import_property_provider.TokenProviderError( + `The SSO session token associated with profile=${profileName} was not found or is invalid. 
${REFRESH_MESSAGE}`, + false + ); + } + validateTokenKey("accessToken", ssoToken.accessToken); + validateTokenKey("expiresAt", ssoToken.expiresAt); + const { accessToken, expiresAt } = ssoToken; + const existingToken = { token: accessToken, expiration: new Date(expiresAt) }; + if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) { + return existingToken; + } + if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1e3) { + validateTokenExpiry(existingToken); + return existingToken; + } + validateTokenKey("clientId", ssoToken.clientId, true); + validateTokenKey("clientSecret", ssoToken.clientSecret, true); + validateTokenKey("refreshToken", ssoToken.refreshToken, true); + try { + lastRefreshAttemptTime.setTime(Date.now()); + const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init); + validateTokenKey("accessToken", newSsoOidcToken.accessToken); + validateTokenKey("expiresIn", newSsoOidcToken.expiresIn); + const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1e3); + try { + await writeSSOTokenToFile(ssoSessionName, { + ...ssoToken, + accessToken: newSsoOidcToken.accessToken, + expiresAt: newTokenExpiration.toISOString(), + refreshToken: newSsoOidcToken.refreshToken + }); + } catch (error) { + } + return { + token: newSsoOidcToken.accessToken, + expiration: newTokenExpiration + }; + } catch (error) { + validateTokenExpiry(existingToken); + return existingToken; + } +}, "fromSso"); + +// src/fromStatic.ts + +var fromStatic = /* @__PURE__ */ __name(({ token, logger }) => async () => { + logger == null ? void 0 : logger.debug("@aws-sdk/token-providers - fromStatic"); + if (!token || !token.token) { + throw new import_property_provider.TokenProviderError(`Please pass a valid token to fromStatic`, false); + } + return token; +}, "fromStatic"); + +// src/nodeProvider.ts + +var nodeProvider = /* @__PURE__ */ __name((init = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)(fromSso(init), async () => { + throw new import_property_provider.TokenProviderError("Could not load token from any providers", false); + }), + (token) => token.expiration !== void 0 && token.expiration.getTime() - Date.now() < 3e5, + (token) => token.expiration !== void 0 +), "nodeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 3068: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + ConditionObject: () => import_util_endpoints.ConditionObject, + DeprecatedObject: () => import_util_endpoints.DeprecatedObject, + 
EndpointError: () => import_util_endpoints.EndpointError, + EndpointObject: () => import_util_endpoints.EndpointObject, + EndpointObjectHeaders: () => import_util_endpoints.EndpointObjectHeaders, + EndpointObjectProperties: () => import_util_endpoints.EndpointObjectProperties, + EndpointParams: () => import_util_endpoints.EndpointParams, + EndpointResolverOptions: () => import_util_endpoints.EndpointResolverOptions, + EndpointRuleObject: () => import_util_endpoints.EndpointRuleObject, + ErrorRuleObject: () => import_util_endpoints.ErrorRuleObject, + EvaluateOptions: () => import_util_endpoints.EvaluateOptions, + Expression: () => import_util_endpoints.Expression, + FunctionArgv: () => import_util_endpoints.FunctionArgv, + FunctionObject: () => import_util_endpoints.FunctionObject, + FunctionReturn: () => import_util_endpoints.FunctionReturn, + ParameterObject: () => import_util_endpoints.ParameterObject, + ReferenceObject: () => import_util_endpoints.ReferenceObject, + ReferenceRecord: () => import_util_endpoints.ReferenceRecord, + RuleSetObject: () => import_util_endpoints.RuleSetObject, + RuleSetRules: () => import_util_endpoints.RuleSetRules, + TreeRuleObject: () => import_util_endpoints.TreeRuleObject, + awsEndpointFunctions: () => awsEndpointFunctions, + getUserAgentPrefix: () => getUserAgentPrefix, + isIpAddress: () => import_util_endpoints.isIpAddress, + partition: () => partition, + resolveEndpoint: () => import_util_endpoints.resolveEndpoint, + setPartitionInfo: () => setPartitionInfo, + useDefaultPartitionInfo: () => useDefaultPartitionInfo +}); +module.exports = __toCommonJS(src_exports); + +// src/aws.ts + + +// src/lib/aws/isVirtualHostableS3Bucket.ts + + +// src/lib/isIpAddress.ts +var import_util_endpoints = __nccwpck_require__(9674); + +// src/lib/aws/isVirtualHostableS3Bucket.ts +var isVirtualHostableS3Bucket = /* @__PURE__ */ __name((value, allowSubDomains = false) => { + if (allowSubDomains) { + for (const label of value.split(".")) { + if (!isVirtualHostableS3Bucket(label)) { + return false; + } + } + return true; + } + if (!(0, import_util_endpoints.isValidHostLabel)(value)) { + return false; + } + if (value.length < 3 || value.length > 63) { + return false; + } + if (value !== value.toLowerCase()) { + return false; + } + if ((0, import_util_endpoints.isIpAddress)(value)) { + return false; + } + return true; +}, "isVirtualHostableS3Bucket"); + +// src/lib/aws/parseArn.ts +var ARN_DELIMITER = ":"; +var RESOURCE_DELIMITER = "/"; +var parseArn = /* @__PURE__ */ __name((value) => { + const segments = value.split(ARN_DELIMITER); + if (segments.length < 6) + return null; + const [arn, partition2, service, region, accountId, ...resourcePath] = segments; + if (arn !== "arn" || partition2 === "" || service === "" || resourcePath.join(ARN_DELIMITER) === "") + return null; + const resourceId = resourcePath.map((resource) => resource.split(RESOURCE_DELIMITER)).flat(); + return { + partition: partition2, + service, + region, + accountId, + resourceId + }; +}, "parseArn"); + +// src/lib/aws/partitions.json +var partitions_default = { + partitions: [{ + id: "aws", + outputs: { + dnsSuffix: "amazonaws.com", + dualStackDnsSuffix: "api.aws", + implicitGlobalRegion: "us-east-1", + name: "aws", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^(us|eu|ap|sa|ca|me|af|il|mx)\\-\\w+\\-\\d+$", + regions: { + "af-south-1": { + description: "Africa (Cape Town)" + }, + "ap-east-1": { + description: "Asia Pacific (Hong Kong)" + }, + "ap-northeast-1": { + description: "Asia 
Pacific (Tokyo)" + }, + "ap-northeast-2": { + description: "Asia Pacific (Seoul)" + }, + "ap-northeast-3": { + description: "Asia Pacific (Osaka)" + }, + "ap-south-1": { + description: "Asia Pacific (Mumbai)" + }, + "ap-south-2": { + description: "Asia Pacific (Hyderabad)" + }, + "ap-southeast-1": { + description: "Asia Pacific (Singapore)" + }, + "ap-southeast-2": { + description: "Asia Pacific (Sydney)" + }, + "ap-southeast-3": { + description: "Asia Pacific (Jakarta)" + }, + "ap-southeast-4": { + description: "Asia Pacific (Melbourne)" + }, + "ap-southeast-5": { + description: "Asia Pacific (Malaysia)" + }, + "aws-global": { + description: "AWS Standard global region" + }, + "ca-central-1": { + description: "Canada (Central)" + }, + "ca-west-1": { + description: "Canada West (Calgary)" + }, + "eu-central-1": { + description: "Europe (Frankfurt)" + }, + "eu-central-2": { + description: "Europe (Zurich)" + }, + "eu-north-1": { + description: "Europe (Stockholm)" + }, + "eu-south-1": { + description: "Europe (Milan)" + }, + "eu-south-2": { + description: "Europe (Spain)" + }, + "eu-west-1": { + description: "Europe (Ireland)" + }, + "eu-west-2": { + description: "Europe (London)" + }, + "eu-west-3": { + description: "Europe (Paris)" + }, + "il-central-1": { + description: "Israel (Tel Aviv)" + }, + "me-central-1": { + description: "Middle East (UAE)" + }, + "me-south-1": { + description: "Middle East (Bahrain)" + }, + "sa-east-1": { + description: "South America (Sao Paulo)" + }, + "us-east-1": { + description: "US East (N. Virginia)" + }, + "us-east-2": { + description: "US East (Ohio)" + }, + "us-west-1": { + description: "US West (N. California)" + }, + "us-west-2": { + description: "US West (Oregon)" + } + } + }, { + id: "aws-cn", + outputs: { + dnsSuffix: "amazonaws.com.cn", + dualStackDnsSuffix: "api.amazonwebservices.com.cn", + implicitGlobalRegion: "cn-northwest-1", + name: "aws-cn", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^cn\\-\\w+\\-\\d+$", + regions: { + "aws-cn-global": { + description: "AWS China global region" + }, + "cn-north-1": { + description: "China (Beijing)" + }, + "cn-northwest-1": { + description: "China (Ningxia)" + } + } + }, { + id: "aws-us-gov", + outputs: { + dnsSuffix: "amazonaws.com", + dualStackDnsSuffix: "api.aws", + implicitGlobalRegion: "us-gov-west-1", + name: "aws-us-gov", + supportsDualStack: true, + supportsFIPS: true + }, + regionRegex: "^us\\-gov\\-\\w+\\-\\d+$", + regions: { + "aws-us-gov-global": { + description: "AWS GovCloud (US) global region" + }, + "us-gov-east-1": { + description: "AWS GovCloud (US-East)" + }, + "us-gov-west-1": { + description: "AWS GovCloud (US-West)" + } + } + }, { + id: "aws-iso", + outputs: { + dnsSuffix: "c2s.ic.gov", + dualStackDnsSuffix: "c2s.ic.gov", + implicitGlobalRegion: "us-iso-east-1", + name: "aws-iso", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-iso\\-\\w+\\-\\d+$", + regions: { + "aws-iso-global": { + description: "AWS ISO (US) global region" + }, + "us-iso-east-1": { + description: "US ISO East" + }, + "us-iso-west-1": { + description: "US ISO WEST" + } + } + }, { + id: "aws-iso-b", + outputs: { + dnsSuffix: "sc2s.sgov.gov", + dualStackDnsSuffix: "sc2s.sgov.gov", + implicitGlobalRegion: "us-isob-east-1", + name: "aws-iso-b", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-isob\\-\\w+\\-\\d+$", + regions: { + "aws-iso-b-global": { + description: "AWS ISOB (US) global region" + }, + "us-isob-east-1": { + description: "US 
ISOB East (Ohio)" + } + } + }, { + id: "aws-iso-e", + outputs: { + dnsSuffix: "cloud.adc-e.uk", + dualStackDnsSuffix: "cloud.adc-e.uk", + implicitGlobalRegion: "eu-isoe-west-1", + name: "aws-iso-e", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^eu\\-isoe\\-\\w+\\-\\d+$", + regions: { + "eu-isoe-west-1": { + description: "EU ISOE West" + } + } + }, { + id: "aws-iso-f", + outputs: { + dnsSuffix: "csp.hci.ic.gov", + dualStackDnsSuffix: "csp.hci.ic.gov", + implicitGlobalRegion: "us-isof-south-1", + name: "aws-iso-f", + supportsDualStack: false, + supportsFIPS: true + }, + regionRegex: "^us\\-isof\\-\\w+\\-\\d+$", + regions: {} + }], + version: "1.1" +}; + +// src/lib/aws/partition.ts +var selectedPartitionsInfo = partitions_default; +var selectedUserAgentPrefix = ""; +var partition = /* @__PURE__ */ __name((value) => { + const { partitions } = selectedPartitionsInfo; + for (const partition2 of partitions) { + const { regions, outputs } = partition2; + for (const [region, regionData] of Object.entries(regions)) { + if (region === value) { + return { + ...outputs, + ...regionData + }; + } + } + } + for (const partition2 of partitions) { + const { regionRegex, outputs } = partition2; + if (new RegExp(regionRegex).test(value)) { + return { + ...outputs + }; + } + } + const DEFAULT_PARTITION = partitions.find((partition2) => partition2.id === "aws"); + if (!DEFAULT_PARTITION) { + throw new Error( + "Provided region was not found in the partition array or regex, and default partition with id 'aws' doesn't exist." + ); + } + return { + ...DEFAULT_PARTITION.outputs + }; +}, "partition"); +var setPartitionInfo = /* @__PURE__ */ __name((partitionsInfo, userAgentPrefix = "") => { + selectedPartitionsInfo = partitionsInfo; + selectedUserAgentPrefix = userAgentPrefix; +}, "setPartitionInfo"); +var useDefaultPartitionInfo = /* @__PURE__ */ __name(() => { + setPartitionInfo(partitions_default, ""); +}, "useDefaultPartitionInfo"); +var getUserAgentPrefix = /* @__PURE__ */ __name(() => selectedUserAgentPrefix, "getUserAgentPrefix"); + +// src/aws.ts +var awsEndpointFunctions = { + isVirtualHostableS3Bucket, + parseArn, + partition +}; +import_util_endpoints.customEndpointFunctions.aws = awsEndpointFunctions; + +// src/resolveEndpoint.ts + + +// src/types/EndpointError.ts + + +// src/types/EndpointRuleObject.ts + + +// src/types/ErrorRuleObject.ts + + +// src/types/RuleSetObject.ts + + +// src/types/TreeRuleObject.ts + + +// src/types/shared.ts + +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 1656: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), 
mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + NODE_APP_ID_CONFIG_OPTIONS: () => NODE_APP_ID_CONFIG_OPTIONS, + UA_APP_ID_ENV_NAME: () => UA_APP_ID_ENV_NAME, + UA_APP_ID_INI_NAME: () => UA_APP_ID_INI_NAME, + createDefaultUserAgentProvider: () => createDefaultUserAgentProvider, + crtAvailability: () => crtAvailability, + defaultUserAgent: () => defaultUserAgent +}); +module.exports = __toCommonJS(src_exports); + +// src/defaultUserAgent.ts +var import_os = __nccwpck_require__(857); +var import_process = __nccwpck_require__(932); + +// src/crt-availability.ts +var crtAvailability = { + isCrtAvailable: false +}; + +// src/is-crt-available.ts +var isCrtAvailable = /* @__PURE__ */ __name(() => { + if (crtAvailability.isCrtAvailable) { + return ["md/crt-avail"]; + } + return null; +}, "isCrtAvailable"); + +// src/defaultUserAgent.ts +var createDefaultUserAgentProvider = /* @__PURE__ */ __name(({ serviceId, clientVersion }) => { + return async (config) => { + var _a; + const sections = [ + // sdk-metadata + ["aws-sdk-js", clientVersion], + // ua-metadata + ["ua", "2.1"], + // os-metadata + [`os/${(0, import_os.platform)()}`, (0, import_os.release)()], + // language-metadata + // ECMAScript edition doesn't matter in JS, so no version needed. + ["lang/js"], + ["md/nodejs", `${import_process.versions.node}`] + ]; + const crtAvailable = isCrtAvailable(); + if (crtAvailable) { + sections.push(crtAvailable); + } + if (serviceId) { + sections.push([`api/${serviceId}`, clientVersion]); + } + if (import_process.env.AWS_EXECUTION_ENV) { + sections.push([`exec-env/${import_process.env.AWS_EXECUTION_ENV}`]); + } + const appId = await ((_a = config == null ? void 0 : config.userAgentAppId) == null ? void 0 : _a.call(config)); + const resolvedUserAgent = appId ? [...sections, [`app/${appId}`]] : [...sections]; + return resolvedUserAgent; + }; +}, "createDefaultUserAgentProvider"); +var defaultUserAgent = createDefaultUserAgentProvider; + +// src/nodeAppIdConfigOptions.ts +var import_middleware_user_agent = __nccwpck_require__(2959); +var UA_APP_ID_ENV_NAME = "AWS_SDK_UA_APP_ID"; +var UA_APP_ID_INI_NAME = "sdk_ua_app_id"; +var UA_APP_ID_INI_NAME_DEPRECATED = "sdk-ua-app-id"; +var NODE_APP_ID_CONFIG_OPTIONS = { + environmentVariableSelector: (env2) => env2[UA_APP_ID_ENV_NAME], + configFileSelector: (profile) => profile[UA_APP_ID_INI_NAME] ?? profile[UA_APP_ID_INI_NAME_DEPRECATED], + default: import_middleware_user_agent.DEFAULT_UA_APP_ID +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 9228: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(3890); + + +/***/ }), + +/***/ 3890: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +// Generated by CoffeeScript 2.6.1 +(function() { + /* + Module difflib -- helpers for computing deltas between objects. + + Function getCloseMatches(word, possibilities, n=3, cutoff=0.6): + Use SequenceMatcher to return list of the best "good enough" matches. + + Function contextDiff(a, b): + For two lists of strings, return a delta in context diff format. + + Function ndiff(a, b): + Return a delta: the difference between `a` and `b` (lists of strings). + + Function restore(delta, which): + Return one of the two sequences that generated an ndiff delta. + + Function unifiedDiff(a, b): + For two lists of strings, return a delta in unified diff format. 
+ + Class SequenceMatcher: + A flexible class for comparing pairs of sequences of any type. + + Class Differ: + For producing human-readable deltas from sequences of lines of text. + */ + var Differ, Heap, IS_CHARACTER_JUNK, IS_LINE_JUNK, SequenceMatcher, _any, _arrayCmp, _calculateRatio, _countLeading, _formatRangeContext, _formatRangeUnified, _has, assert, contextDiff, floor, getCloseMatches, max, min, ndiff, restore, unifiedDiff, + indexOf = [].indexOf; + + // Requires + ({floor, max, min} = Math); + + Heap = __nccwpck_require__(1622); + + assert = __nccwpck_require__(2613); + + // Helper functions + _calculateRatio = function(matches, length) { + if (length) { + return 2.0 * matches / length; + } else { + return 1.0; + } + }; + + _arrayCmp = function(a, b) { + var i, l, la, lb, ref; + [la, lb] = [a.length, b.length]; + for (i = l = 0, ref = min(la, lb); (0 <= ref ? l < ref : l > ref); i = 0 <= ref ? ++l : --l) { + if (a[i] < b[i]) { + return -1; + } + if (a[i] > b[i]) { + return 1; + } + } + return la - lb; + }; + + _has = function(obj, key) { + return Object.prototype.hasOwnProperty.call(obj, key); + }; + + _any = function(items) { + var item, l, len; + for (l = 0, len = items.length; l < len; l++) { + item = items[l]; + if (item) { + return true; + } + } + return false; + }; + + SequenceMatcher = class SequenceMatcher { + /* + SequenceMatcher is a flexible class for comparing pairs of sequences of + any type, so long as the sequence elements are hashable. The basic + algorithm predates, and is a little fancier than, an algorithm + published in the late 1980's by Ratcliff and Obershelp under the + hyperbolic name "gestalt pattern matching". The basic idea is to find + the longest contiguous matching subsequence that contains no "junk" + elements (R-O doesn't address junk). The same idea is then applied + recursively to the pieces of the sequences to the left and to the right + of the matching subsequence. This does not yield minimal edit + sequences, but does tend to yield matches that "look right" to people. + + SequenceMatcher tries to compute a "human-friendly diff" between two + sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the + longest *contiguous* & junk-free matching subsequence. That's what + catches peoples' eyes. The Windows(tm) windiff has another interesting + notion, pairing up elements that appear uniquely in each sequence. + That, and the method here, appear to yield more intuitive difference + reports than does diff. This method appears to be the least vulnerable + to synching up on blocks of "junk lines", though (like blank lines in + ordinary text files, or maybe "
<P>
" lines in HTML files). That may be + because this is the only method of the 3 that has a *concept* of + "junk" . + + Example, comparing two strings, and considering blanks to be "junk": + + >>> isjunk = (c) -> c is ' ' + >>> s = new SequenceMatcher(isjunk, + 'private Thread currentThread;', + 'private volatile Thread currentThread;') + + .ratio() returns a float in [0, 1], measuring the "similarity" of the + sequences. As a rule of thumb, a .ratio() value over 0.6 means the + sequences are close matches: + + >>> s.ratio().toPrecision(3) + '0.866' + + If you're only interested in where the sequences match, + .getMatchingBlocks() is handy: + + >>> for [a, b, size] in s.getMatchingBlocks() + ... console.log("a[#{a}] and b[#{b}] match for #{size} elements"); + a[0] and b[0] match for 8 elements + a[8] and b[17] match for 21 elements + a[29] and b[38] match for 0 elements + + Note that the last tuple returned by .get_matching_blocks() is always a + dummy, (len(a), len(b), 0), and this is the only case in which the last + tuple element (number of elements matched) is 0. + + If you want to know how to change the first sequence into the second, + use .get_opcodes(): + + >>> for [op, a1, a2, b1, b2] in s.getOpcodes() + ... console.log "#{op} a[#{a1}:#{a2}] b[#{b1}:#{b2}]" + equal a[0:8] b[0:8] + insert a[8:8] b[8:17] + equal a[8:29] b[17:38] + + See the Differ class for a fancy human-friendly file differencer, which + uses SequenceMatcher both to compare sequences of lines, and to compare + sequences of characters within similar (near-matching) lines. + + See also function getCloseMatches() in this module, which shows how + simple code building on SequenceMatcher can be used to do useful work. + + Timing: Basic R-O is cubic time worst case and quadratic time expected + case. SequenceMatcher is quadratic time for the worst case and has + expected-case behavior dependent in a complicated way on how many + elements the sequences have in common; best case time is linear. + + Methods: + + constructor(isjunk=null, a='', b='') + Construct a SequenceMatcher. + + setSeqs(a, b) + Set the two sequences to be compared. + + setSeq1(a) + Set the first sequence to be compared. + + setSeq2(b) + Set the second sequence to be compared. + + findLongestMatch(alo, ahi, blo, bhi) + Find longest matching block in a[alo:ahi] and b[blo:bhi]. + + getMatchingBlocks() + Return list of triples describing matching subsequences. + + getOpcodes() + Return list of 5-tuples describing how to turn a into b. + + ratio() + Return a measure of the sequences' similarity (float in [0,1]). + + quickRatio() + Return an upper bound on .ratio() relatively quickly. + + realQuickRatio() + Return an upper bound on ratio() very quickly. + */ + constructor(isjunk1, a = '', b = '', autojunk = true) { + this.isjunk = isjunk1; + this.autojunk = autojunk; + /* + Construct a SequenceMatcher. + + Optional arg isjunk is null (the default), or a one-argument + function that takes a sequence element and returns true iff the + element is junk. Null is equivalent to passing "(x) -> 0", i.e. + no elements are considered to be junk. For example, pass + (x) -> x in ' \t' + if you're comparing lines as sequences of characters, and don't + want to synch up on blanks or hard tabs. + + Optional arg a is the first of two sequences to be compared. By + default, an empty string. The elements of a must be hashable. See + also .setSeqs() and .setSeq1(). + + Optional arg b is the second of two sequences to be compared. By + default, an empty string. 
The elements of b must be hashable. See + also .setSeqs() and .setSeq2(). + + Optional arg autojunk should be set to false to disable the + "automatic junk heuristic" that treats popular elements as junk + (see module documentation for more information). + */ + // Members: + // a + // first sequence + // b + // second sequence; differences are computed as "what do + // we need to do to 'a' to change it into 'b'?" + // b2j + // for x in b, b2j[x] is a list of the indices (into b) + // at which x appears; junk elements do not appear + // fullbcount + // for x in b, fullbcount[x] == the number of times x + // appears in b; only materialized if really needed (used + // only for computing quickRatio()) + // matchingBlocks + // a list of [i, j, k] triples, where a[i...i+k] == b[j...j+k]; + // ascending & non-overlapping in i and in j; terminated by + // a dummy (len(a), len(b), 0) sentinel + // opcodes + // a list of [tag, i1, i2, j1, j2] tuples, where tag is + // one of + // 'replace' a[i1...i2] should be replaced by b[j1...j2] + // 'delete' a[i1...i2] should be deleted + // 'insert' b[j1...j2] should be inserted + // 'equal' a[i1...i2] == b[j1...j2] + // isjunk + // a user-supplied function taking a sequence element and + // returning true iff the element is "junk" -- this has + // subtle but helpful effects on the algorithm, which I'll + // get around to writing up someday <0.9 wink>. + // DON'T USE! Only __chainB uses this. Use isbjunk. + // isbjunk + // for x in b, isbjunk(x) == isjunk(x) but much faster; + // DOES NOT WORK for x in a! + // isbpopular + // for x in b, isbpopular(x) is true iff b is reasonably long + // (at least 200 elements) and x accounts for more than 1 + 1% of + // its elements (when autojunk is enabled). + // DOES NOT WORK for x in a! + this.a = this.b = null; + this.setSeqs(a, b); + } + + setSeqs(a, b) { + /* + Set the two sequences to be compared. + + >>> s = new SequenceMatcher() + >>> s.setSeqs('abcd', 'bcde') + >>> s.ratio() + 0.75 + */ + this.setSeq1(a); + return this.setSeq2(b); + } + + setSeq1(a) { + /* + Set the first sequence to be compared. + + The second sequence to be compared is not changed. + + >>> s = new SequenceMatcher(null, 'abcd', 'bcde') + >>> s.ratio() + 0.75 + >>> s.setSeq1('bcde') + >>> s.ratio() + 1.0 + + SequenceMatcher computes and caches detailed information about the + second sequence, so if you want to compare one sequence S against + many sequences, use .setSeq2(S) once and call .setSeq1(x) + repeatedly for each of the other sequences. + + See also setSeqs() and setSeq2(). + */ + if (a === this.a) { + return; + } + this.a = a; + return this.matchingBlocks = this.opcodes = null; + } + + setSeq2(b) { + /* + Set the second sequence to be compared. + + The first sequence to be compared is not changed. + + >>> s = new SequenceMatcher(null, 'abcd', 'bcde') + >>> s.ratio() + 0.75 + >>> s.setSeq2('abcd') + >>> s.ratio() + 1.0 + + SequenceMatcher computes and caches detailed information about the + second sequence, so if you want to compare one sequence S against + many sequences, use .setSeq2(S) once and call .setSeq1(x) + repeatedly for each of the other sequences. + + See also setSeqs() and setSeq1(). 
+ */ + if (b === this.b) { + return; + } + this.b = b; + this.matchingBlocks = this.opcodes = null; + this.fullbcount = null; + return this._chainB(); + } + + // For each element x in b, set b2j[x] to a list of the indices in + // b where x appears; the indices are in increasing order; note that + // the number of times x appears in b is b2j[x].length ... + // when @isjunk is defined, junk elements don't show up in this + // map at all, which stops the central findLongestMatch method + // from starting any matching block at a junk element ... + // also creates the fast isbjunk function ... + // b2j also does not contain entries for "popular" elements, meaning + // elements that account for more than 1 + 1% of the total elements, and + // when the sequence is reasonably large (>= 200 elements); this can + // be viewed as an adaptive notion of semi-junk, and yields an enormous + // speedup when, e.g., comparing program files with hundreds of + // instances of "return null;" ... + // note that this is only called when b changes; so for cross-product + // kinds of matches, it's best to call setSeq2 once, then setSeq1 + // repeatedly + _chainB() { + var b, b2j, elt, i, indices, isjunk, junk, l, len, n, ntest, popular; + // Because isjunk is a user-defined function, and we test + // for junk a LOT, it's important to minimize the number of calls. + // Before the tricks described here, __chainB was by far the most + // time-consuming routine in the whole module! If anyone sees + // Jim Roskind, thank him again for profile.py -- I never would + // have guessed that. + // The first trick is to build b2j ignoring the possibility + // of junk. I.e., we don't call isjunk at all yet. Throwing + // out the junk later is much cheaper than building b2j "right" + // from the start. + b = this.b; + this.b2j = b2j = new Map(); + for (i = l = 0, len = b.length; l < len; i = ++l) { + elt = b[i]; + if (!b2j.has(elt)) { + b2j.set(elt, []); + } + indices = b2j.get(elt); + indices.push(i); + } + // Purge junk elements + junk = new Map(); + isjunk = this.isjunk; + if (isjunk) { + b2j.forEach(function(idxs, elt) { + if (isjunk(elt)) { + junk.set(elt, true); + return b2j.delete(elt); + } + }); + } + // Purge popular elements that are not junk + popular = new Map(); + n = b.length; + if (this.autojunk && n >= 200) { + ntest = floor(n / 100) + 1; + b2j.forEach(function(idxs, elt) { + if (idxs.length > ntest) { + popular.set(elt, true); + return b2j.delete(elt); + } + }); + } + // Now for x in b, isjunk(x) == x in junk, but the latter is much faster. + // Sicne the number of *unique* junk elements is probably small, the + // memory burden of keeping this set alive is likely trivial compared to + // the size of b2j. + this.isbjunk = function(b) { + return junk.has(b); + }; + return this.isbpopular = function(b) { + return popular.has(b); + }; + } + + findLongestMatch(alo, ahi, blo, bhi) { + var a, b, b2j, besti, bestj, bestsize, i, isbjunk, j, j2len, jlist, k, l, len, m, newj2len, ref, ref1; + /* + Find longest matching block in a[alo...ahi] and b[blo...bhi]. + + If isjunk is not defined: + + Return [i,j,k] such that a[i...i+k] is equal to b[j...j+k], where + alo <= i <= i+k <= ahi + blo <= j <= j+k <= bhi + and for all [i',j',k'] meeting those conditions, + k >= k' + i <= i' + and if i == i', j <= j' + + In other words, of all maximal matching blocks, return one that + starts earliest in a, and of all those maximal matching blocks that + start earliest in a, return the one that starts earliest in b. 
+ + >>> isjunk = (x) -> x is ' ' + >>> s = new SequenceMatcher(isjunk, ' abcd', 'abcd abcd') + >>> s.findLongestMatch(0, 5, 0, 9) + [1, 0, 4] + + >>> s = new SequenceMatcher(null, 'ab', 'c') + >>> s.findLongestMatch(0, 2, 0, 1) + [0, 0, 0] + */ + // CAUTION: stripping common prefix or suffix would be incorrect. + // E.g., + // ab + // acab + // Longest matching block is "ab", but if common prefix is + // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so + // strip, so ends up claiming that ab is changed to acab by + // inserting "ca" in the middle. That's minimal but unintuitive: + // "it's obvious" that someone inserted "ac" at the front. + // Windiff ends up at the same place as diff, but by pairing up + // the unique 'b's and then matching the first two 'a's. + [a, b, b2j, isbjunk] = [this.a, this.b, this.b2j, this.isbjunk]; + [besti, bestj, bestsize] = [alo, blo, 0]; + // find longest junk-free match + // during an iteration of the loop, j2len[j] = length of longest + // junk-free match ending with a[i-1] and b[j] + j2len = {}; + for (i = l = ref = alo, ref1 = ahi; (ref <= ref1 ? l < ref1 : l > ref1); i = ref <= ref1 ? ++l : --l) { + // look at all instances of a[i] in b; note that because + // b2j has no junk keys, the loop is skipped if a[i] is junk + newj2len = {}; + jlist = []; + if (b2j.has(a[i])) { + jlist = b2j.get(a[i]); + } + for (m = 0, len = jlist.length; m < len; m++) { + j = jlist[m]; + if (j < blo) { + // a[i] matches b[j] + continue; + } + if (j >= bhi) { + break; + } + k = newj2len[j] = (j2len[j - 1] || 0) + 1; + if (k > bestsize) { + [besti, bestj, bestsize] = [i - k + 1, j - k + 1, k]; + } + } + j2len = newj2len; + } + // Extend the best by non-junk elements on each end. In particular, + // "popular" non-junk elements aren't in b2j, which greatly speeds + // the inner loop above, but also means "the best" match so far + // doesn't contain any junk *or* popular non-junk elements. + while (besti > alo && bestj > blo && !isbjunk(b[bestj - 1]) && a[besti - 1] === b[bestj - 1]) { + [besti, bestj, bestsize] = [besti - 1, bestj - 1, bestsize + 1]; + } + while (besti + bestsize < ahi && bestj + bestsize < bhi && !isbjunk(b[bestj + bestsize]) && a[besti + bestsize] === b[bestj + bestsize]) { + bestsize++; + } + // Now that we have a wholly interesting match (albeit possibly + // empty!), we may as well suck up the matching junk on each + // side of it too. Can't think of a good reason not to, and it + // saves post-processing the (possibly considerable) expense of + // figuring out what to do with it. In the case of an empty + // interesting match, this is clearly the right thing to do, + // because no other kind of match is possible in the regions. + while (besti > alo && bestj > blo && isbjunk(b[bestj - 1]) && a[besti - 1] === b[bestj - 1]) { + [besti, bestj, bestsize] = [besti - 1, bestj - 1, bestsize + 1]; + } + while (besti + bestsize < ahi && bestj + bestsize < bhi && isbjunk(b[bestj + bestsize]) && a[besti + bestsize] === b[bestj + bestsize]) { + bestsize++; + } + return [besti, bestj, bestsize]; + } + + getMatchingBlocks() { + var ahi, alo, bhi, blo, i, i1, i2, j, j1, j2, k, k1, k2, l, la, lb, len, matchingBlocks, nonAdjacent, queue, x; + if (this.matchingBlocks) { + /* + Return list of triples describing matching subsequences. + + Each triple is of the form [i, j, n], and means that + a[i...i+n] == b[j...j+n]. The triples are monotonically increasing in + i and in j. 
it's also guaranteed that if + [i, j, n] and [i', j', n'] are adjacent triples in the list, and + the second is not the last triple in the list, then i+n != i' or + j+n != j'. IOW, adjacent triples never describe adjacent equal + blocks. + + The last triple is a dummy, [a.length, b.length, 0], and is the only + triple with n==0. + + >>> s = new SequenceMatcher(null, 'abxcd', 'abcd') + >>> s.getMatchingBlocks() + [[0, 0, 2], [3, 2, 2], [5, 4, 0]] + + */ + return this.matchingBlocks; + } + [la, lb] = [this.a.length, this.b.length]; + // This is most naturally expressed as a recursive algorithm, but + // at least one user bumped into extreme use cases that exceeded + // the recursion limit on their box. So, now we maintain a list + // ('queue`) of blocks we still need to look at, and append partial + // results to `matching_blocks` in a loop; the matches are sorted + // at the end. + queue = [[0, la, 0, lb]]; + matchingBlocks = []; + while (queue.length) { + [alo, ahi, blo, bhi] = queue.pop(); + [i, j, k] = x = this.findLongestMatch(alo, ahi, blo, bhi); + // a[alo...i] vs b[blo...j] unknown + // a[i...i+k] same as b[j...j+k] + // a[i+k...ahi] vs b[j+k...bhi] unknown + if (k) { + matchingBlocks.push(x); + if (alo < i && blo < j) { + queue.push([alo, i, blo, j]); + } + if (i + k < ahi && j + k < bhi) { + queue.push([i + k, ahi, j + k, bhi]); + } + } + } + matchingBlocks.sort(_arrayCmp); + // It's possible that we have adjacent equal blocks in the + // matching_blocks list now. + i1 = j1 = k1 = 0; + nonAdjacent = []; + for (l = 0, len = matchingBlocks.length; l < len; l++) { + [i2, j2, k2] = matchingBlocks[l]; + // Is this block adjacent to i1, j1, k1? + if (i1 + k1 === i2 && j1 + k1 === j2) { + // Yes, so collapse them -- this just increases the length of + // the first block by the length of the second, and the first + // block so lengthened remains the block to compare against. + k1 += k2; + } else { + // Not adjacent. Remember the first block (k1==0 means it's + // the dummy we started with), and make the second block the + // new block to compare against. + if (k1) { + nonAdjacent.push([i1, j1, k1]); + } + [i1, j1, k1] = [i2, j2, k2]; + } + } + if (k1) { + nonAdjacent.push([i1, j1, k1]); + } + nonAdjacent.push([la, lb, 0]); + return this.matchingBlocks = nonAdjacent; + } + + getOpcodes() { + var ai, answer, bj, i, j, l, len, ref, size, tag; + if (this.opcodes) { + /* + Return list of 5-tuples describing how to turn a into b. + + Each tuple is of the form [tag, i1, i2, j1, j2]. The first tuple + has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the + tuple preceding it, and likewise for j1 == the previous j2. + + The tags are strings, with these meanings: + + 'replace': a[i1...i2] should be replaced by b[j1...j2] + 'delete': a[i1...i2] should be deleted. + Note that j1==j2 in this case. + 'insert': b[j1...j2] should be inserted at a[i1...i1]. + Note that i1==i2 in this case. 
+ 'equal': a[i1...i2] == b[j1...j2] + + >>> s = new SequenceMatcher(null, 'qabxcd', 'abycdf') + >>> s.getOpcodes() + [ [ 'delete' , 0 , 1 , 0 , 0 ] , + [ 'equal' , 1 , 3 , 0 , 2 ] , + [ 'replace' , 3 , 4 , 2 , 3 ] , + [ 'equal' , 4 , 6 , 3 , 5 ] , + [ 'insert' , 6 , 6 , 5 , 6 ] ] + */ + return this.opcodes; + } + i = j = 0; + this.opcodes = answer = []; + ref = this.getMatchingBlocks(); + for (l = 0, len = ref.length; l < len; l++) { + [ai, bj, size] = ref[l]; + // invariant: we've pumped out correct diffs to change + // a[0...i] into b[0...j], and the next matching block is + // a[ai...ai+size] == b[bj...bj+size]. So we need to pump + // out a diff to change a[i:ai] into b[j...bj], pump out + // the matching block, and move [i,j] beyond the match + tag = ''; + if (i < ai && j < bj) { + tag = 'replace'; + } else if (i < ai) { + tag = 'delete'; + } else if (j < bj) { + tag = 'insert'; + } + if (tag) { + answer.push([tag, i, ai, j, bj]); + } + [i, j] = [ai + size, bj + size]; + // the list of matching blocks is terminated by a + // sentinel with size 0 + if (size) { + answer.push(['equal', ai, i, bj, j]); + } + } + return answer; + } + + getGroupedOpcodes(n = 3) { + /* + Isolate change clusters by eliminating ranges with no changes. + + Return a list groups with upto n lines of context. + Each group is in the same format as returned by get_opcodes(). + + >>> a = [1...40].map(String) + >>> b = a.slice() + >>> b[8...8] = 'i' + >>> b[20] += 'x' + >>> b[23...28] = [] + >>> b[30] += 'y' + >>> s = new SequenceMatcher(null, a, b) + >>> s.getGroupedOpcodes() + [ [ [ 'equal' , 5 , 8 , 5 , 8 ], + [ 'insert' , 8 , 8 , 8 , 9 ], + [ 'equal' , 8 , 11 , 9 , 12 ] ], + [ [ 'equal' , 16 , 19 , 17 , 20 ], + [ 'replace' , 19 , 20 , 20 , 21 ], + [ 'equal' , 20 , 22 , 21 , 23 ], + [ 'delete' , 22 , 27 , 23 , 23 ], + [ 'equal' , 27 , 30 , 23 , 26 ] ], + [ [ 'equal' , 31 , 34 , 27 , 30 ], + [ 'replace' , 34 , 35 , 30 , 31 ], + [ 'equal' , 35 , 38 , 31 , 34 ] ] ] + */ + var codes, group, groups, i1, i2, j1, j2, l, len, nn, tag; + codes = this.getOpcodes(); + if (!codes.length) { + codes = [['equal', 0, 1, 0, 1]]; + } + // Fixup leading and trailing groups if they show no changes. + if (codes[0][0] === 'equal') { + [tag, i1, i2, j1, j2] = codes[0]; + codes[0] = [tag, max(i1, i2 - n), i2, max(j1, j2 - n), j2]; + } + if (codes[codes.length - 1][0] === 'equal') { + [tag, i1, i2, j1, j2] = codes[codes.length - 1]; + codes[codes.length - 1] = [tag, i1, min(i2, i1 + n), j1, min(j2, j1 + n)]; + } + nn = n + n; + groups = []; + group = []; + for (l = 0, len = codes.length; l < len; l++) { + [tag, i1, i2, j1, j2] = codes[l]; + // End the current group and start a new one whenever + // there is a large range with no changes. + if (tag === 'equal' && i2 - i1 > nn) { + group.push([tag, i1, min(i2, i1 + n), j1, min(j2, j1 + n)]); + groups.push(group); + group = []; + [i1, j1] = [max(i1, i2 - n), max(j1, j2 - n)]; + } + group.push([tag, i1, i2, j1, j2]); + } + if (group.length && !(group.length === 1 && group[0][0] === 'equal')) { + groups.push(group); + } + return groups; + } + + ratio() { + /* + Return a measure of the sequences' similarity (float in [0,1]). + + Where T is the total number of elements in both sequences, and + M is the number of matches, this is 2.0*M / T. + Note that this is 1 if the sequences are identical, and 0 if + they have nothing in common. 
+ + .ratio() is expensive to compute if you haven't already computed + .getMatchingBlocks() or .getOpcodes(), in which case you may + want to try .quickRatio() or .realQuickRatio() first to get an + upper bound. + + >>> s = new SequenceMatcher(null, 'abcd', 'bcde') + >>> s.ratio() + 0.75 + >>> s.quickRatio() + 0.75 + >>> s.realQuickRatio() + 1.0 + */ + var l, len, match, matches, ref; + matches = 0; + ref = this.getMatchingBlocks(); + for (l = 0, len = ref.length; l < len; l++) { + match = ref[l]; + matches += match[2]; + } + return _calculateRatio(matches, this.a.length + this.b.length); + } + + quickRatio() { + var avail, elt, fullbcount, l, len, len1, m, matches, numb, ref, ref1; + /* + Return an upper bound on ratio() relatively quickly. + + This isn't defined beyond that it is an upper bound on .ratio(), and + is faster to compute. + */ + // viewing a and b as multisets, set matches to the cardinality + // of their intersection; this counts the number of matches + // without regard to order, so is clearly an upper bound + if (!this.fullbcount) { + this.fullbcount = fullbcount = {}; + ref = this.b; + for (l = 0, len = ref.length; l < len; l++) { + elt = ref[l]; + fullbcount[elt] = (fullbcount[elt] || 0) + 1; + } + } + fullbcount = this.fullbcount; + // avail[x] is the number of times x appears in 'b' less the + // number of times we've seen it in 'a' so far ... kinda + avail = {}; + matches = 0; + ref1 = this.a; + for (m = 0, len1 = ref1.length; m < len1; m++) { + elt = ref1[m]; + if (_has(avail, elt)) { + numb = avail[elt]; + } else { + numb = fullbcount[elt] || 0; + } + avail[elt] = numb - 1; + if (numb > 0) { + matches++; + } + } + return _calculateRatio(matches, this.a.length + this.b.length); + } + + realQuickRatio() { + /* + Return an upper bound on ratio() very quickly. + + This isn't defined beyond that it is an upper bound on .ratio(), and + is faster to compute than either .ratio() or .quickRatio(). + */ + var la, lb; + [la, lb] = [this.a.length, this.b.length]; + // can't have more matches than the number of elements in the + // shorter sequence + return _calculateRatio(min(la, lb), la + lb); + } + + }; + + getCloseMatches = function(word, possibilities, n = 3, cutoff = 0.6) { + var l, len, len1, m, result, results, s, score, x; + /* + Use SequenceMatcher to return list of the best "good enough" matches. + + word is a sequence for which close matches are desired (typically a + string). + + possibilities is a list of sequences against which to match word + (typically a list of strings). + + Optional arg n (default 3) is the maximum number of close matches to + return. n must be > 0. + + Optional arg cutoff (default 0.6) is a float in [0, 1]. Possibilities + that don't score at least that similar to word are ignored. + + The best (no more than n) matches among the possibilities are returned + in a list, sorted by similarity score, most similar first. 
+ + >>> getCloseMatches('appel', ['ape', 'apple', 'peach', 'puppy']) + ['apple', 'ape'] + >>> KEYWORDS = require('coffee-script').RESERVED + >>> getCloseMatches('wheel', KEYWORDS) + ['when', 'while'] + >>> getCloseMatches('accost', KEYWORDS) + ['const'] + */ + if (!(n > 0)) { + throw new Error(`n must be > 0: (${n})`); + } + if (!((0.0 <= cutoff && cutoff <= 1.0))) { + throw new Error(`cutoff must be in [0.0, 1.0]: (${cutoff})`); + } + result = []; + s = new SequenceMatcher(); + s.setSeq2(word); + for (l = 0, len = possibilities.length; l < len; l++) { + x = possibilities[l]; + s.setSeq1(x); + if (s.realQuickRatio() >= cutoff && s.quickRatio() >= cutoff && s.ratio() >= cutoff) { + result.push([s.ratio(), x]); + } + } + // Move the best scorers to head of list + result = Heap.nlargest(result, n, _arrayCmp); + results = []; + for (m = 0, len1 = result.length; m < len1; m++) { + [score, x] = result[m]; + // Strip scores for the best n matches + results.push(x); + } + return results; + }; + + _countLeading = function(line, ch) { + /* + Return number of `ch` characters at the start of `line`. + + >>> _countLeading(' abc', ' ') + 3 + */ + var i, n; + [i, n] = [0, line.length]; + while (i < n && line[i] === ch) { + i++; + } + return i; + }; + + Differ = class Differ { + /* + Differ is a class for comparing sequences of lines of text, and + producing human-readable differences or deltas. Differ uses + SequenceMatcher both to compare sequences of lines, and to compare + sequences of characters within similar (near-matching) lines. + + Each line of a Differ delta begins with a two-letter code: + + '- ' line unique to sequence 1 + '+ ' line unique to sequence 2 + ' ' line common to both sequences + '? ' line not present in either input sequence + + Lines beginning with '? ' attempt to guide the eye to intraline + differences, and were not present in either input sequence. These lines + can be confusing if the sequences contain tab characters. + + Note that Differ makes no claim to produce a *minimal* diff. To the + contrary, minimal diffs are often counter-intuitive, because they synch + up anywhere possible, sometimes accidental matches 100 pages apart. + Restricting synch points to contiguous matches preserves some notion of + locality, at the occasional cost of producing a longer diff. + + Example: Comparing two texts. + + >>> text1 = ['1. Beautiful is better than ugly.\n', + ... '2. Explicit is better than implicit.\n', + ... '3. Simple is better than complex.\n', + ... '4. Complex is better than complicated.\n'] + >>> text1.length + 4 + >>> text2 = ['1. Beautiful is better than ugly.\n', + ... '3. Simple is better than complex.\n', + ... '4. Complicated is better than complex.\n', + ... '5. Flat is better than nested.\n'] + + Next we instantiate a Differ object: + + >>> d = new Differ() + + Note that when instantiating a Differ object we may pass functions to + filter out line and character 'junk'. + + Finally, we compare the two: + + >>> result = d.compare(text1, text2) + [ ' 1. Beautiful is better than ugly.\n', + '- 2. Explicit is better than implicit.\n', + '- 3. Simple is better than complex.\n', + '+ 3. Simple is better than complex.\n', + '? ++\n', + '- 4. Complex is better than complicated.\n', + '? ^ ---- ^\n', + '+ 4. Complicated is better than complex.\n', + '? ++++ ^ ^\n', + '+ 5. Flat is better than nested.\n' ] + + Methods: + + constructor(linejunk=null, charjunk=null) + Construct a text differencer, with optional filters. 
+ compare(a, b) + Compare two sequences of lines; generate the resulting delta. + */ + constructor(linejunk1, charjunk1) { + this.linejunk = linejunk1; + this.charjunk = charjunk1; + } + + /* + Construct a text differencer, with optional filters. + + The two optional keyword parameters are for filter functions: + + - `linejunk`: A function that should accept a single string argument, + and return true iff the string is junk. The module-level function + `IS_LINE_JUNK` may be used to filter out lines without visible + characters, except for at most one splat ('#'). It is recommended + to leave linejunk null. + + - `charjunk`: A function that should accept a string of length 1. The + module-level function `IS_CHARACTER_JUNK` may be used to filter out + whitespace characters (a blank or tab; **note**: bad idea to include + newline in this!). Use of IS_CHARACTER_JUNK is recommended. + */ + compare(a, b) { + /* + Compare two sequences of lines; generate the resulting delta. + + Each sequence must contain individual single-line strings ending with + newlines. Such sequences can be obtained from the `readlines()` method + of file-like objects. The delta generated also consists of newline- + terminated strings, ready to be printed as-is via the writeline() + method of a file-like object. + + Example: + + >>> d = new Differ + >>> d.compare(['one\n', 'two\n', 'three\n'], + ... ['ore\n', 'tree\n', 'emu\n']) + [ '- one\n', + '? ^\n', + '+ ore\n', + '? ^\n', + '- two\n', + '- three\n', + '? -\n', + '+ tree\n', + '+ emu\n' ] + */ + var ahi, alo, bhi, blo, cruncher, g, l, len, len1, line, lines, m, ref, tag; + cruncher = new SequenceMatcher(this.linejunk, a, b); + lines = []; + ref = cruncher.getOpcodes(); + for (l = 0, len = ref.length; l < len; l++) { + [tag, alo, ahi, blo, bhi] = ref[l]; + switch (tag) { + case 'replace': + g = this._fancyReplace(a, alo, ahi, b, blo, bhi); + break; + case 'delete': + g = this._dump('-', a, alo, ahi); + break; + case 'insert': + g = this._dump('+', b, blo, bhi); + break; + case 'equal': + g = this._dump(' ', a, alo, ahi); + break; + default: + throw new Error(`unknow tag (${tag})`); + } + for (m = 0, len1 = g.length; m < len1; m++) { + line = g[m]; + lines.push(line); + } + } + return lines; + } + + _dump(tag, x, lo, hi) { + var i, l, ref, ref1, results; + results = []; + for (i = l = ref = lo, ref1 = hi; (ref <= ref1 ? l < ref1 : l > ref1); i = ref <= ref1 ? ++l : --l) { + /* + Generate comparison results for a same-tagged range. 
+ */ + results.push(`${tag} ${x[i]}`); + } + return results; + } + + _plainReplace(a, alo, ahi, b, blo, bhi) { + var first, g, l, len, len1, line, lines, m, ref, second; + assert(alo < ahi && blo < bhi); + // dump the shorter block first -- reduces the burden on short-term + // memory if the blocks are of very different sizes + if (bhi - blo < ahi - alo) { + first = this._dump('+', b, blo, bhi); + second = this._dump('-', a, alo, ahi); + } else { + first = this._dump('-', a, alo, ahi); + second = this._dump('+', b, blo, bhi); + } + lines = []; + ref = [first, second]; + for (l = 0, len = ref.length; l < len; l++) { + g = ref[l]; + for (m = 0, len1 = g.length; m < len1; m++) { + line = g[m]; + lines.push(line); + } + } + return lines; + } + + _fancyReplace(a, alo, ahi, b, blo, bhi) { + var aelt, ai, ai1, ai2, atags, belt, bestRatio, besti, bestj, bj, bj1, bj2, btags, cruncher, cutoff, eqi, eqj, i, j, l, la, lb, len, len1, len2, len3, len4, line, lines, m, o, p, q, r, ref, ref1, ref2, ref3, ref4, ref5, ref6, ref7, ref8, t, tag; + /* + When replacing one block of lines with another, search the blocks + for *similar* lines; the best-matching pair (if any) is used as a + synch point, and intraline difference marking is done on the + similar pair. Lots of work, but often worth it. + + Example: + >>> d = new Differ + >>> d._fancyReplace(['abcDefghiJkl\n'], 0, 1, + ... ['abcdefGhijkl\n'], 0, 1) + [ '- abcDefghiJkl\n', + '? ^ ^ ^\n', + '+ abcdefGhijkl\n', + '? ^ ^ ^\n' ] + */ + // don't synch up unless the lines have a similarity score of at + // least cutoff; best_ratio tracks the best score seen so far + [bestRatio, cutoff] = [0.74, 0.75]; + cruncher = new SequenceMatcher(this.charjunk); + [eqi, eqj] = [ + null, + null // 1st indices of equal lines (if any) + ]; + lines = []; +// search for the pair that matches best without being identical +// (identical lines must be junk lines, & we don't want to synch up +// on junk -- unless we have to) + for (j = l = ref = blo, ref1 = bhi; (ref <= ref1 ? l < ref1 : l > ref1); j = ref <= ref1 ? ++l : --l) { + bj = b[j]; + cruncher.setSeq2(bj); + for (i = m = ref2 = alo, ref3 = ahi; (ref2 <= ref3 ? m < ref3 : m > ref3); i = ref2 <= ref3 ? ++m : --m) { + ai = a[i]; + if (ai === bj) { + if (eqi === null) { + [eqi, eqj] = [i, j]; + } + continue; + } + cruncher.setSeq1(ai); + // computing similarity is expensive, so use the quick + // upper bounds first -- have seen this speed up messy + // compares by a factor of 3. 
+ // note that ratio() is only expensive to compute the first + // time it's called on a sequence pair; the expensive part + // of the computation is cached by cruncher + if (cruncher.realQuickRatio() > bestRatio && cruncher.quickRatio() > bestRatio && cruncher.ratio() > bestRatio) { + [bestRatio, besti, bestj] = [cruncher.ratio(), i, j]; + } + } + } + if (bestRatio < cutoff) { + // no non-identical "pretty close" pair + if (eqi === null) { + ref4 = this._plainReplace(a, alo, ahi, b, blo, bhi); + // no identical pair either -- treat it as a straight replace + for (o = 0, len = ref4.length; o < len; o++) { + line = ref4[o]; + lines.push(line); + } + return lines; + } + // no close pair, but an identical pair -- synch up on that + [besti, bestj, bestRatio] = [eqi, eqj, 1.0]; + } else { + // there's a close pair, so forget the identical pair (if any) + eqi = null; + } + ref5 = this._fancyHelper(a, alo, besti, b, blo, bestj); + // a[besti] very similar to b[bestj]; eqi is null iff they're not + // identical + + // pump out diffs from before the synch point + for (p = 0, len1 = ref5.length; p < len1; p++) { + line = ref5[p]; + lines.push(line); + } + // do intraline marking on the synch pair + [aelt, belt] = [a[besti], b[bestj]]; + if (eqi === null) { + // pump out a '-', '?', '+', '?' quad for the synched lines + atags = btags = ''; + cruncher.setSeqs(aelt, belt); + ref6 = cruncher.getOpcodes(); + for (q = 0, len2 = ref6.length; q < len2; q++) { + [tag, ai1, ai2, bj1, bj2] = ref6[q]; + [la, lb] = [ai2 - ai1, bj2 - bj1]; + switch (tag) { + case 'replace': + atags += Array(la + 1).join('^'); + btags += Array(lb + 1).join('^'); + break; + case 'delete': + atags += Array(la + 1).join('-'); + break; + case 'insert': + btags += Array(lb + 1).join('+'); + break; + case 'equal': + atags += Array(la + 1).join(' '); + btags += Array(lb + 1).join(' '); + break; + default: + throw new Error(`unknow tag (${tag})`); + } + } + ref7 = this._qformat(aelt, belt, atags, btags); + for (r = 0, len3 = ref7.length; r < len3; r++) { + line = ref7[r]; + lines.push(line); + } + } else { + // the synch pair is identical + lines.push(' ' + aelt); + } + ref8 = this._fancyHelper(a, besti + 1, ahi, b, bestj + 1, bhi); + // pump out diffs from after the synch point + for (t = 0, len4 = ref8.length; t < len4; t++) { + line = ref8[t]; + lines.push(line); + } + return lines; + } + + _fancyHelper(a, alo, ahi, b, blo, bhi) { + var g; + g = []; + if (alo < ahi) { + if (blo < bhi) { + g = this._fancyReplace(a, alo, ahi, b, blo, bhi); + } else { + g = this._dump('-', a, alo, ahi); + } + } else if (blo < bhi) { + g = this._dump('+', b, blo, bhi); + } + return g; + } + + _qformat(aline, bline, atags, btags) { + /* + Format "?" output and deal with leading tabs. + + Example: + + >>> d = new Differ + >>> d._qformat('\tabcDefghiJkl\n', '\tabcdefGhijkl\n', + [ '- \tabcDefghiJkl\n', + '? \t ^ ^ ^\n', + '+ \tabcdefGhijkl\n', + '? \t ^ ^ ^\n' ] + */ + var common, lines; + lines = []; + // Can hurt, but will probably help most of the time. + common = min(_countLeading(aline, '\t'), _countLeading(bline, '\t')); + common = min(common, _countLeading(atags.slice(0, common), ' ')); + common = min(common, _countLeading(btags.slice(0, common), ' ')); + atags = atags.slice(common).replace(/\s+$/, ''); + btags = btags.slice(common).replace(/\s+$/, ''); + lines.push('- ' + aline); + if (atags.length) { + lines.push(`? ${Array(common + 1).join('\t')}${atags}\n`); + } + lines.push('+ ' + bline); + if (btags.length) { + lines.push(`? 
${Array(common + 1).join('\t')}${btags}\n`); + } + return lines; + } + + }; + + // With respect to junk, an earlier version of ndiff simply refused to + // *start* a match with a junk element. The result was cases like this: + // before: private Thread currentThread; + // after: private volatile Thread currentThread; + // If you consider whitespace to be junk, the longest contiguous match + // not starting with junk is "e Thread currentThread". So ndiff reported + // that "e volatil" was inserted between the 't' and the 'e' in "private". + // While an accurate view, to people that's absurd. The current version + // looks for matching blocks that are entirely junk-free, then extends the + // longest one of those as far as possible but only with matching junk. + // So now "currentThread" is matched, then extended to suck up the + // preceding blank; then "private" is matched, and extended to suck up the + // following blank; then "Thread" is matched; and finally ndiff reports + // that "volatile " was inserted before "Thread". The only quibble + // remaining is that perhaps it was really the case that " volatile" + // was inserted after "private". I can live with that . + IS_LINE_JUNK = function(line, pat = /^\s*#?\s*$/) { + /* + Return 1 for ignorable line: iff `line` is blank or contains a single '#'. + + Examples: + + >>> IS_LINE_JUNK('\n') + true + >>> IS_LINE_JUNK(' # \n') + true + >>> IS_LINE_JUNK('hello\n') + false + */ + return pat.test(line); + }; + + IS_CHARACTER_JUNK = function(ch, ws = ' \t') { + /* + Return 1 for ignorable character: iff `ch` is a space or tab. + + Examples: + >>> IS_CHARACTER_JUNK(' ').should.be.true + true + >>> IS_CHARACTER_JUNK('\t').should.be.true + true + >>> IS_CHARACTER_JUNK('\n').should.be.false + false + >>> IS_CHARACTER_JUNK('x').should.be.false + false + */ + return indexOf.call(ws, ch) >= 0; + }; + + _formatRangeUnified = function(start, stop) { + var beginning, length; + /* + Convert range to the "ed" format' + */ + // Per the diff spec at http://www.unix.org/single_unix_specification/ + beginning = start + 1; // lines start numbering with one + length = stop - start; + if (length === 1) { + return `${beginning}`; + } + if (!length) { // empty ranges begin at line just before the range + beginning--; + } + return `${beginning},${length}`; + }; + + unifiedDiff = function(a, b, {fromfile, tofile, fromfiledate, tofiledate, n, lineterm} = {}) { + var file1Range, file2Range, first, fromdate, group, i1, i2, j1, j2, l, last, len, len1, len2, len3, len4, line, lines, m, o, p, q, ref, ref1, ref2, ref3, started, tag, todate; + /* + Compare two sequences of lines; generate the delta as a unified diff. + + Unified diffs are a compact way of showing line changes and a few + lines of context. The number of context lines is set by 'n' which + defaults to three. + + By default, the diff control lines (those with ---, +++, or @@) are + created with a trailing newline. + + For inputs that do not have trailing newlines, set the lineterm + argument to "" so that the output will be uniformly newline free. + + The unidiff format normally has a header for filenames and modification + times. Any or all of these may be specified using strings for + 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. + The modification times are normally expressed in the ISO 8601 format. + + Example: + + >>> unifiedDiff('one two three four'.split(' '), + ... 'zero one tree four'.split(' '), { + ... fromfile: 'Original' + ... tofile: 'Current', + ... 
fromfiledate: '2005-01-26 23:30:50', + ... tofiledate: '2010-04-02 10:20:52', + ... lineterm: '' + ... }) + [ '--- Original\t2005-01-26 23:30:50', + '+++ Current\t2010-04-02 10:20:52', + '@@ -1,4 +1,4 @@', + '+zero', + ' one', + '-two', + '-three', + '+tree', + ' four' ] + + */ + if (fromfile == null) { + fromfile = ''; + } + if (tofile == null) { + tofile = ''; + } + if (fromfiledate == null) { + fromfiledate = ''; + } + if (tofiledate == null) { + tofiledate = ''; + } + if (n == null) { + n = 3; + } + if (lineterm == null) { + lineterm = '\n'; + } + lines = []; + started = false; + ref = (new SequenceMatcher(null, a, b)).getGroupedOpcodes(); + for (l = 0, len = ref.length; l < len; l++) { + group = ref[l]; + if (!started) { + started = true; + fromdate = fromfiledate ? `\t${fromfiledate}` : ''; + todate = tofiledate ? `\t${tofiledate}` : ''; + lines.push(`--- ${fromfile}${fromdate}${lineterm}`); + lines.push(`+++ ${tofile}${todate}${lineterm}`); + } + [first, last] = [group[0], group[group.length - 1]]; + file1Range = _formatRangeUnified(first[1], last[2]); + file2Range = _formatRangeUnified(first[3], last[4]); + lines.push(`@@ -${file1Range} +${file2Range} @@${lineterm}`); + for (m = 0, len1 = group.length; m < len1; m++) { + [tag, i1, i2, j1, j2] = group[m]; + if (tag === 'equal') { + ref1 = a.slice(i1, i2); + for (o = 0, len2 = ref1.length; o < len2; o++) { + line = ref1[o]; + lines.push(' ' + line); + } + continue; + } + if (tag === 'replace' || tag === 'delete') { + ref2 = a.slice(i1, i2); + for (p = 0, len3 = ref2.length; p < len3; p++) { + line = ref2[p]; + lines.push('-' + line); + } + } + if (tag === 'replace' || tag === 'insert') { + ref3 = b.slice(j1, j2); + for (q = 0, len4 = ref3.length; q < len4; q++) { + line = ref3[q]; + lines.push('+' + line); + } + } + } + } + return lines; + }; + + _formatRangeContext = function(start, stop) { + var beginning, length; + /* + Convert range to the "ed" format' + */ + // Per the diff spec at http://www.unix.org/single_unix_specification/ + beginning = start + 1; // lines start numbering with one + length = stop - start; + if (!length) { // empty ranges begin at line just before the range + beginning--; + } + if (length <= 1) { + return `${beginning}`; + } + return `${beginning},${beginning + length - 1}`; + }; + + // See http://www.unix.org/single_unix_specification/ + contextDiff = function(a, b, {fromfile, tofile, fromfiledate, tofiledate, n, lineterm} = {}) { + var _, file1Range, file2Range, first, fromdate, group, i1, i2, j1, j2, l, last, len, len1, len2, len3, len4, line, lines, m, o, p, prefix, q, ref, ref1, ref2, started, tag, todate; + /* + Compare two sequences of lines; generate the delta as a context diff. + + Context diffs are a compact way of showing line changes and a few + lines of context. The number of context lines is set by 'n' which + defaults to three. + + By default, the diff control lines (those with *** or ---) are + created with a trailing newline. This is helpful so that inputs + created from file.readlines() result in diffs that are suitable for + file.writelines() since both the inputs and outputs have trailing + newlines. + + For inputs that do not have trailing newlines, set the lineterm + argument to "" so that the output will be uniformly newline free. + + The context diff format normally has a header for filenames and + modification times. Any or all of these may be specified using + strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. 
+ The modification times are normally expressed in the ISO 8601 format. + If not specified, the strings default to blanks. + + Example: + >>> a = ['one\n', 'two\n', 'three\n', 'four\n'] + >>> b = ['zero\n', 'one\n', 'tree\n', 'four\n'] + >>> contextDiff(a, b, {fromfile: 'Original', tofile: 'Current'}) + [ '*** Original\n', + '--- Current\n', + '***************\n', + '*** 1,4 ****\n', + ' one\n', + '! two\n', + '! three\n', + ' four\n', + '--- 1,4 ----\n', + '+ zero\n', + ' one\n', + '! tree\n', + ' four\n' ] + */ + if (fromfile == null) { + fromfile = ''; + } + if (tofile == null) { + tofile = ''; + } + if (fromfiledate == null) { + fromfiledate = ''; + } + if (tofiledate == null) { + tofiledate = ''; + } + if (n == null) { + n = 3; + } + if (lineterm == null) { + lineterm = '\n'; + } + prefix = { + insert: '+ ', + delete: '- ', + replace: '! ', + equal: ' ' + }; + started = false; + lines = []; + ref = (new SequenceMatcher(null, a, b)).getGroupedOpcodes(); + for (l = 0, len = ref.length; l < len; l++) { + group = ref[l]; + if (!started) { + started = true; + fromdate = fromfiledate ? `\t${fromfiledate}` : ''; + todate = tofiledate ? `\t${tofiledate}` : ''; + lines.push(`*** ${fromfile}${fromdate}${lineterm}`); + lines.push(`--- ${tofile}${todate}${lineterm}`); + [first, last] = [group[0], group[group.length - 1]]; + lines.push('***************' + lineterm); + file1Range = _formatRangeContext(first[1], last[2]); + lines.push(`*** ${file1Range} ****${lineterm}`); + if (_any((function() { + var len1, m, results; + results = []; + for (m = 0, len1 = group.length; m < len1; m++) { + [tag, _, _, _, _] = group[m]; + results.push(tag === 'replace' || tag === 'delete'); + } + return results; + })())) { + for (m = 0, len1 = group.length; m < len1; m++) { + [tag, i1, i2, _, _] = group[m]; + if (tag !== 'insert') { + ref1 = a.slice(i1, i2); + for (o = 0, len2 = ref1.length; o < len2; o++) { + line = ref1[o]; + lines.push(prefix[tag] + line); + } + } + } + } + file2Range = _formatRangeContext(first[3], last[4]); + lines.push(`--- ${file2Range} ----${lineterm}`); + if (_any((function() { + var len3, p, results; + results = []; + for (p = 0, len3 = group.length; p < len3; p++) { + [tag, _, _, _, _] = group[p]; + results.push(tag === 'replace' || tag === 'insert'); + } + return results; + })())) { + for (p = 0, len3 = group.length; p < len3; p++) { + [tag, _, _, j1, j2] = group[p]; + if (tag !== 'delete') { + ref2 = b.slice(j1, j2); + for (q = 0, len4 = ref2.length; q < len4; q++) { + line = ref2[q]; + lines.push(prefix[tag] + line); + } + } + } + } + } + } + return lines; + }; + + ndiff = function(a, b, linejunk, charjunk = IS_CHARACTER_JUNK) { + /* + Compare `a` and `b` (lists of strings); return a `Differ`-style delta. + + Optional keyword parameters `linejunk` and `charjunk` are for filter + functions (or None): + + - linejunk: A function that should accept a single string argument, and + return true iff the string is junk. The default is null, and is + recommended; + + - charjunk: A function that should accept a string of length 1. The + default is module-level function IS_CHARACTER_JUNK, which filters out + whitespace characters (a blank or tab; note: bad idea to include newline + in this!). + + Example: + >>> a = ['one\n', 'two\n', 'three\n'] + >>> b = ['ore\n', 'tree\n', 'emu\n'] + >>> ndiff(a, b) + [ '- one\n', + '? ^\n', + '+ ore\n', + '? ^\n', + '- two\n', + '- three\n', + '? 
-\n', + '+ tree\n', + '+ emu\n' ] + */ + return (new Differ(linejunk, charjunk)).compare(a, b); + }; + + restore = function(delta, which) { + /* + Generate one of the two sequences that generated a delta. + + Given a `delta` produced by `Differ.compare()` or `ndiff()`, extract + lines originating from file 1 or 2 (parameter `which`), stripping off line + prefixes. + + Examples: + >>> a = ['one\n', 'two\n', 'three\n'] + >>> b = ['ore\n', 'tree\n', 'emu\n'] + >>> diff = ndiff(a, b) + >>> restore(diff, 1) + [ 'one\n', + 'two\n', + 'three\n' ] + >>> restore(diff, 2) + [ 'ore\n', + 'tree\n', + 'emu\n' ] + */ + var l, len, line, lines, prefixes, ref, tag; + tag = { + 1: '- ', + 2: '+ ' + }[which]; + if (!tag) { + throw new Error(`unknow delta choice (must be 1 or 2): ${which}`); + } + prefixes = [' ', tag]; + lines = []; + for (l = 0, len = delta.length; l < len; l++) { + line = delta[l]; + if (ref = line.slice(0, 2), indexOf.call(prefixes, ref) >= 0) { + lines.push(line.slice(2)); + } + } + return lines; + }; + + // exports to global + exports._arrayCmp = _arrayCmp; + + exports.SequenceMatcher = SequenceMatcher; + + exports.getCloseMatches = getCloseMatches; + + exports._countLeading = _countLeading; + + exports.Differ = Differ; + + exports.IS_LINE_JUNK = IS_LINE_JUNK; + + exports.IS_CHARACTER_JUNK = IS_CHARACTER_JUNK; + + exports._formatRangeUnified = _formatRangeUnified; + + exports.unifiedDiff = unifiedDiff; + + exports._formatRangeContext = _formatRangeContext; + + exports.contextDiff = contextDiff; + + exports.ndiff = ndiff; + + exports.restore = restore; + +}).call(this); + + +/***/ }), + +/***/ 7864: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + createTokenAuth: () => createTokenAuth +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; +} + +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; +} + +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 1897: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + Octokit: () => Octokit +}); +module.exports = __toCommonJS(dist_src_exports); +var import_universal_user_agent = __nccwpck_require__(3843); +var import_before_after_hook = __nccwpck_require__(2732); +var import_request = __nccwpck_require__(8636); +var import_graphql = __nccwpck_require__(7); +var import_auth_token = __nccwpck_require__(7864); + +// pkg/dist-src/version.js +var VERSION = "5.2.0"; + +// pkg/dist-src/index.js +var noop = () => { +}; +var consoleWarn = console.warn.bind(console); +var consoleError = console.error.bind(console); +var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var Octokit = class { + static { + this.VERSION = VERSION; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; + } + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. 
+ octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); + } + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 4471: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + endpoint: () => endpoint +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/defaults.js +var import_universal_user_agent = __nccwpck_require__(3843); + +// pkg/dist-src/version.js +var VERSION = "9.0.5"; + +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } +}; + +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +// pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/util/merge-deep.js +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} + +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } + } + return obj; +} + +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? 
{ method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); + } + return mergedOptions; +} + +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; +} + +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); + } +} + +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); +} + +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); +} + +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 7: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + GraphqlResponseError: () => GraphqlResponseError, + graphql: () => graphql2, + withCustomRequest: () => withCustomRequest +}); +module.exports = __toCommonJS(dist_src_exports); +var import_request3 = __nccwpck_require__(8636); +var import_universal_user_agent = __nccwpck_require__(3843); + +// pkg/dist-src/version.js +var VERSION = "7.1.0"; + +// pkg/dist-src/with-defaults.js +var import_request2 = __nccwpck_require__(8636); + +// pkg/dist-src/graphql.js +var import_request = __nccwpck_require__(8636); + +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, 
this.constructor); + } + } +}; + +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } + } + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); +} + +// pkg/dist-src/with-defaults.js +function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} + +// pkg/dist-src/index.js +var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 8082: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + composePaginateRest: () => composePaginateRest, + isPaginatingEndpoint: () => isPaginatingEndpoint, + paginateRest: () => paginateRest, + 
paginatingEndpoints: () => paginatingEndpoints +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "9.2.1"; + +// pkg/dist-src/normalize-paginated-list-response.js +function normalizePaginatedListResponse(response) { + if (!response.data) { + return { + ...response, + data: [] + }; + } + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) + return response; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + response.data.total_count = totalCount; + return response; +} + +// pkg/dist-src/iterator.js +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) + return { done: true }; + try { + const response = await requestMethod({ method, url, headers }); + const normalizedResponse = normalizePaginatedListResponse(response); + url = ((normalizedResponse.headers.link || "").match( + /<([^>]+)>;\s*rel="next"/ + ) || [])[1]; + return { value: normalizedResponse }; + } catch (error) { + if (error.status !== 409) + throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + }) + }; +} + +// pkg/dist-src/paginate.js +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; + } + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); +} +function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; + } + let earlyExit = false; + function done() { + earlyExit = true; + } + results = results.concat( + mapFn ? 
mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator2, mapFn); + }); +} + +// pkg/dist-src/compose-paginate.js +var composePaginateRest = Object.assign(paginate, { + iterator +}); + +// pkg/dist-src/generated/paginating-endpoints.js +var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/organization-roles/{role_id}/teams", + "GET /orgs/{org}/organization-roles/{role_id}/users", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET 
/repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET 
/users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET /users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" +]; + +// pkg/dist-src/paginating-endpoints.js +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + +// pkg/dist-src/index.js +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 4935: +/***/ ((module) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + legacyRestEndpointMethods: () => legacyRestEndpointMethods, + restEndpointMethods: () => restEndpointMethods +}); +module.exports = __toCommonJS(dist_src_exports); + +// pkg/dist-src/version.js +var VERSION = "10.4.1"; + +// pkg/dist-src/generated/endpoints.js +var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + 
"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getCustomOidcSubClaimForRepo: [ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + 
getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + 
"GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setCustomOidcSubClaimForRepo: [ + "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET 
/user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST /applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET 
/app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + 
getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET 
/user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + copilot: { + addCopilotSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET 
/orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", 
"removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { 
"content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" + } + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, + { + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" + } + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" + } + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, + { + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, + { + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" + } + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" + } + ], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" + } + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" + } + ] + }, + oidc: { + getOidcCustomSubTemplateForOrg: [ + "GET /orgs/{org}/actions/oidc/customization/sub" + ], + updateOidcCustomSubTemplateForOrg: [ + "PUT /orgs/{org}/actions/oidc/customization/sub" + ] + }, + orgs: { + 
addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + assignTeamToOrgRole: [ + "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" + ], + assignUserToOrgRole: [ + "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], + createCustomOrganizationRole: ["POST /orgs/{org}/organization-roles"], + createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH /orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteCustomOrganizationRole: [ + "DELETE /orgs/{org}/organization-roles/{role_id}" + ], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET /orgs/{org}/properties/schema/{custom_property_name}" + ], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"], + listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"], + listOrgRoles: ["GET /orgs/{org}/organization-roles"], + listOrganizationFineGrainedPermissions: [ + "GET /orgs/{org}/organization-fine-grained-permissions" + ], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + 
listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + patchCustomOrganizationRole: [ + "PATCH /orgs/{org}/organization-roles/{role_id}" + ], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], + revokeAllOrgRolesTeam: [ + "DELETE /orgs/{org}/organization-roles/teams/{team_slug}" + ], + revokeAllOrgRolesUser: [ + "DELETE /orgs/{org}/organization-roles/users/{username}" + ], + revokeOrgRoleTeam: [ + "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" + ], + revokeOrgRoleUser: [ + "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}" + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] + } + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET 
/users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + 
create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE 
/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + cancelPagesDeployment: [ + "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel" + ], + checkAutomatedSecurityFixes: [ + "GET /repos/{owner}/{repo}/automated-security-fixes" + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + 
createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateCustomPropertiesValues: [ + "PATCH /repos/{owner}/{repo}/properties/values" + ], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET 
/repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET 
/orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesDeployment: [ + "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}" + ], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + 
listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + 
updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createFork: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks" + ], + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + 
deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + follow: ["PUT 
/user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; +var endpoints_default = Endpoints; + +// pkg/dist-src/endpoints-to-methods.js +var endpointMethodsMap = /* @__PURE__ */ new Map(); +for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + { + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); + } + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } +} +var handler = { + has({ scope }, 
methodName) { + return endpointMethodsMap.get(scope).has(methodName); + }, + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; + }, + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; + } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; + } + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); + } else { + cache[methodName] = octokit.request.defaults(endpointDefaults); + } + return cache[methodName]; + } +}; +function endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); + } + return newMethods; +} +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 + }); + return requestWithDefaults(options); + } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); + } + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for (const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } + } + return requestWithDefaults(options2); + } + return requestWithDefaults(...args); + } + return Object.assign(withDecorations, requestWithDefaults); +} + +// pkg/dist-src/index.js +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; +} +legacyRestEndpointMethods.VERSION = VERSION; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 3708: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + RequestError: () => RequestError +}); +module.exports = __toCommonJS(dist_src_exports); +var import_deprecation = __nccwpck_require__(4150); +var import_once = __toESM(__nccwpck_require__(5560)); +var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); +var RequestError = class extends Error { + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
+ ) + ); + return headers || {}; + } + }); + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 8636: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// pkg/dist-src/index.js +var dist_src_exports = {}; +__export(dist_src_exports, { + request: () => request +}); +module.exports = __toCommonJS(dist_src_exports); +var import_endpoint = __nccwpck_require__(4471); +var import_universal_user_agent = __nccwpck_require__(3843); + +// pkg/dist-src/version.js +var VERSION = "8.4.0"; + +// pkg/dist-src/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} + +// pkg/dist-src/fetch-wrapper.js +var import_request_error = __nccwpck_require__(3708); + +// pkg/dist-src/get-buffer-response.js +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +// pkg/dist-src/fetch-wrapper.js +function fetchWrapper(requestOptions) { + var _a, _b, _c, _d; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + let { fetch } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch = requestOptions.request.fetch; + } + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch(requestOptions.url, { + method: requestOptions.method, + body: requestOptions.body, + redirect: (_c = requestOptions.request) == null ? void 0 : _c.redirect, + headers: requestOptions.headers, + signal: (_d = requestOptions.request) == null ? void 0 : _d.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. 
+ ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { + request: requestOptions + }); + }); +} +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); +} +function toErrorMessage(data) { + if (typeof data === "string") + return data; + let suffix; + if ("documentation_url" in data) { + suffix = ` - ${data.documentation_url}`; + } else { + suffix = ""; + } + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; + } + return `${data.message}${suffix}`; + } + return `Unknown error: ${JSON.stringify(data)}`; +} + +// pkg/dist-src/with-defaults.js +function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; 
+ return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); +} + +// pkg/dist-src/index.js +var request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` + } +}); +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 9316: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_USE_DUALSTACK_ENDPOINT: () => CONFIG_USE_DUALSTACK_ENDPOINT, + CONFIG_USE_FIPS_ENDPOINT: () => CONFIG_USE_FIPS_ENDPOINT, + DEFAULT_USE_DUALSTACK_ENDPOINT: () => DEFAULT_USE_DUALSTACK_ENDPOINT, + DEFAULT_USE_FIPS_ENDPOINT: () => DEFAULT_USE_FIPS_ENDPOINT, + ENV_USE_DUALSTACK_ENDPOINT: () => ENV_USE_DUALSTACK_ENDPOINT, + ENV_USE_FIPS_ENDPOINT: () => ENV_USE_FIPS_ENDPOINT, + NODE_REGION_CONFIG_FILE_OPTIONS: () => NODE_REGION_CONFIG_FILE_OPTIONS, + NODE_REGION_CONFIG_OPTIONS: () => NODE_REGION_CONFIG_OPTIONS, + NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS: () => NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS, + NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS: () => NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS, + REGION_ENV_NAME: () => REGION_ENV_NAME, + REGION_INI_NAME: () => REGION_INI_NAME, + getRegionInfo: () => getRegionInfo, + resolveCustomEndpointsConfig: () => resolveCustomEndpointsConfig, + resolveEndpointsConfig: () => resolveEndpointsConfig, + resolveRegionConfig: () => resolveRegionConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/endpointsConfig/NodeUseDualstackEndpointConfigOptions.ts +var import_util_config_provider = __nccwpck_require__(6716); +var ENV_USE_DUALSTACK_ENDPOINT = "AWS_USE_DUALSTACK_ENDPOINT"; +var CONFIG_USE_DUALSTACK_ENDPOINT = "use_dualstack_endpoint"; +var DEFAULT_USE_DUALSTACK_ENDPOINT = false; +var NODE_USE_DUALSTACK_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, import_util_config_provider.booleanSelector)(env, ENV_USE_DUALSTACK_ENDPOINT, import_util_config_provider.SelectorType.ENV), + configFileSelector: (profile) => (0, import_util_config_provider.booleanSelector)(profile, CONFIG_USE_DUALSTACK_ENDPOINT, import_util_config_provider.SelectorType.CONFIG), + default: false +}; + +// src/endpointsConfig/NodeUseFipsEndpointConfigOptions.ts + +var ENV_USE_FIPS_ENDPOINT = "AWS_USE_FIPS_ENDPOINT"; +var CONFIG_USE_FIPS_ENDPOINT = "use_fips_endpoint"; +var DEFAULT_USE_FIPS_ENDPOINT = false; +var NODE_USE_FIPS_ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => (0, import_util_config_provider.booleanSelector)(env, 
ENV_USE_FIPS_ENDPOINT, import_util_config_provider.SelectorType.ENV), + configFileSelector: (profile) => (0, import_util_config_provider.booleanSelector)(profile, CONFIG_USE_FIPS_ENDPOINT, import_util_config_provider.SelectorType.CONFIG), + default: false +}; + +// src/endpointsConfig/resolveCustomEndpointsConfig.ts +var import_util_middleware = __nccwpck_require__(6324); +var resolveCustomEndpointsConfig = /* @__PURE__ */ __name((input) => { + const { endpoint, urlParser } = input; + return { + ...input, + tls: input.tls ?? true, + endpoint: (0, import_util_middleware.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint), + isCustomEndpoint: true, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(input.useDualstackEndpoint ?? false) + }; +}, "resolveCustomEndpointsConfig"); + +// src/endpointsConfig/resolveEndpointsConfig.ts + + +// src/endpointsConfig/utils/getEndpointFromRegion.ts +var getEndpointFromRegion = /* @__PURE__ */ __name(async (input) => { + const { tls = true } = input; + const region = await input.region(); + const dnsHostRegex = new RegExp(/^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9])$/); + if (!dnsHostRegex.test(region)) { + throw new Error("Invalid region in client config"); + } + const useDualstackEndpoint = await input.useDualstackEndpoint(); + const useFipsEndpoint = await input.useFipsEndpoint(); + const { hostname } = await input.regionInfoProvider(region, { useDualstackEndpoint, useFipsEndpoint }) ?? {}; + if (!hostname) { + throw new Error("Cannot resolve hostname from client config"); + } + return input.urlParser(`${tls ? "https:" : "http:"}//${hostname}`); +}, "getEndpointFromRegion"); + +// src/endpointsConfig/resolveEndpointsConfig.ts +var resolveEndpointsConfig = /* @__PURE__ */ __name((input) => { + const useDualstackEndpoint = (0, import_util_middleware.normalizeProvider)(input.useDualstackEndpoint ?? false); + const { endpoint, useFipsEndpoint, urlParser } = input; + return { + ...input, + tls: input.tls ?? true, + endpoint: endpoint ? (0, import_util_middleware.normalizeProvider)(typeof endpoint === "string" ? urlParser(endpoint) : endpoint) : () => getEndpointFromRegion({ ...input, useDualstackEndpoint, useFipsEndpoint }), + isCustomEndpoint: !!endpoint, + useDualstackEndpoint + }; +}, "resolveEndpointsConfig"); + +// src/regionConfig/config.ts +var REGION_ENV_NAME = "AWS_REGION"; +var REGION_INI_NAME = "region"; +var NODE_REGION_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[REGION_ENV_NAME], + configFileSelector: (profile) => profile[REGION_INI_NAME], + default: () => { + throw new Error("Region is missing"); + } +}; +var NODE_REGION_CONFIG_FILE_OPTIONS = { + preferredFile: "credentials" +}; + +// src/regionConfig/isFipsRegion.ts +var isFipsRegion = /* @__PURE__ */ __name((region) => typeof region === "string" && (region.startsWith("fips-") || region.endsWith("-fips")), "isFipsRegion"); + +// src/regionConfig/getRealRegion.ts +var getRealRegion = /* @__PURE__ */ __name((region) => isFipsRegion(region) ? ["fips-aws-global", "aws-fips"].includes(region) ? 
"us-east-1" : region.replace(/fips-(dkr-|prod-)?|-fips/, "") : region, "getRealRegion"); + +// src/regionConfig/resolveRegionConfig.ts +var resolveRegionConfig = /* @__PURE__ */ __name((input) => { + const { region, useFipsEndpoint } = input; + if (!region) { + throw new Error("Region is missing"); + } + return { + ...input, + region: async () => { + if (typeof region === "string") { + return getRealRegion(region); + } + const providedRegion = await region(); + return getRealRegion(providedRegion); + }, + useFipsEndpoint: async () => { + const providedRegion = typeof region === "string" ? region : await region(); + if (isFipsRegion(providedRegion)) { + return true; + } + return typeof useFipsEndpoint !== "function" ? Promise.resolve(!!useFipsEndpoint) : useFipsEndpoint(); + } + }; +}, "resolveRegionConfig"); + +// src/regionInfo/getHostnameFromVariants.ts +var getHostnameFromVariants = /* @__PURE__ */ __name((variants = [], { useFipsEndpoint, useDualstackEndpoint }) => { + var _a; + return (_a = variants.find( + ({ tags }) => useFipsEndpoint === tags.includes("fips") && useDualstackEndpoint === tags.includes("dualstack") + )) == null ? void 0 : _a.hostname; +}, "getHostnameFromVariants"); + +// src/regionInfo/getResolvedHostname.ts +var getResolvedHostname = /* @__PURE__ */ __name((resolvedRegion, { regionHostname, partitionHostname }) => regionHostname ? regionHostname : partitionHostname ? partitionHostname.replace("{region}", resolvedRegion) : void 0, "getResolvedHostname"); + +// src/regionInfo/getResolvedPartition.ts +var getResolvedPartition = /* @__PURE__ */ __name((region, { partitionHash }) => Object.keys(partitionHash || {}).find((key) => partitionHash[key].regions.includes(region)) ?? "aws", "getResolvedPartition"); + +// src/regionInfo/getResolvedSigningRegion.ts +var getResolvedSigningRegion = /* @__PURE__ */ __name((hostname, { signingRegion, regionRegex, useFipsEndpoint }) => { + if (signingRegion) { + return signingRegion; + } else if (useFipsEndpoint) { + const regionRegexJs = regionRegex.replace("\\\\", "\\").replace(/^\^/g, "\\.").replace(/\$$/g, "\\."); + const regionRegexmatchArray = hostname.match(regionRegexJs); + if (regionRegexmatchArray) { + return regionRegexmatchArray[0].slice(1, -1); + } + } +}, "getResolvedSigningRegion"); + +// src/regionInfo/getRegionInfo.ts +var getRegionInfo = /* @__PURE__ */ __name((region, { + useFipsEndpoint = false, + useDualstackEndpoint = false, + signingService, + regionHash, + partitionHash +}) => { + var _a, _b, _c, _d, _e; + const partition = getResolvedPartition(region, { partitionHash }); + const resolvedRegion = region in regionHash ? region : ((_a = partitionHash[partition]) == null ? void 0 : _a.endpoint) ?? region; + const hostnameOptions = { useFipsEndpoint, useDualstackEndpoint }; + const regionHostname = getHostnameFromVariants((_b = regionHash[resolvedRegion]) == null ? void 0 : _b.variants, hostnameOptions); + const partitionHostname = getHostnameFromVariants((_c = partitionHash[partition]) == null ? void 0 : _c.variants, hostnameOptions); + const hostname = getResolvedHostname(resolvedRegion, { regionHostname, partitionHostname }); + if (hostname === void 0) { + throw new Error(`Endpoint resolution failed for: ${{ resolvedRegion, useFipsEndpoint, useDualstackEndpoint }}`); + } + const signingRegion = getResolvedSigningRegion(hostname, { + signingRegion: (_d = regionHash[resolvedRegion]) == null ? 
void 0 : _d.signingRegion, + regionRegex: partitionHash[partition].regionRegex, + useFipsEndpoint + }); + return { + partition, + signingService, + hostname, + ...signingRegion && { signingRegion }, + ...((_e = regionHash[resolvedRegion]) == null ? void 0 : _e.signingService) && { + signingService: regionHash[resolvedRegion].signingService + } + }; +}, "getRegionInfo"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 402: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DefaultIdentityProviderConfig: () => DefaultIdentityProviderConfig, + EXPIRATION_MS: () => EXPIRATION_MS, + HttpApiKeyAuthSigner: () => HttpApiKeyAuthSigner, + HttpBearerAuthSigner: () => HttpBearerAuthSigner, + NoAuthSigner: () => NoAuthSigner, + createIsIdentityExpiredFunction: () => createIsIdentityExpiredFunction, + createPaginator: () => createPaginator, + doesIdentityRequireRefresh: () => doesIdentityRequireRefresh, + getHttpAuthSchemeEndpointRuleSetPlugin: () => getHttpAuthSchemeEndpointRuleSetPlugin, + getHttpAuthSchemePlugin: () => getHttpAuthSchemePlugin, + getHttpSigningPlugin: () => getHttpSigningPlugin, + getSmithyContext: () => getSmithyContext, + httpAuthSchemeEndpointRuleSetMiddlewareOptions: () => httpAuthSchemeEndpointRuleSetMiddlewareOptions, + httpAuthSchemeMiddleware: () => httpAuthSchemeMiddleware, + httpAuthSchemeMiddlewareOptions: () => httpAuthSchemeMiddlewareOptions, + httpSigningMiddleware: () => httpSigningMiddleware, + httpSigningMiddlewareOptions: () => httpSigningMiddlewareOptions, + isIdentityExpired: () => isIdentityExpired, + memoizeIdentityProvider: () => memoizeIdentityProvider, + normalizeProvider: () => normalizeProvider, + requestBuilder: () => import_protocols.requestBuilder, + setFeature: () => setFeature +}); +module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts +var import_types = __nccwpck_require__(690); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/middleware-http-auth-scheme/httpAuthSchemeMiddleware.ts +var import_util_middleware = __nccwpck_require__(6324); +function convertHttpAuthSchemesToMap(httpAuthSchemes) { + const map = /* @__PURE__ */ new Map(); + for (const scheme of httpAuthSchemes) { + map.set(scheme.schemeId, scheme); + } + return map; +} +__name(convertHttpAuthSchemesToMap, "convertHttpAuthSchemesToMap"); +var httpAuthSchemeMiddleware = /* @__PURE__ */ __name((config, mwOptions) => (next, context) => async 
(args) => { + var _a; + const options = config.httpAuthSchemeProvider( + await mwOptions.httpAuthSchemeParametersProvider(config, context, args.input) + ); + const authSchemes = convertHttpAuthSchemesToMap(config.httpAuthSchemes); + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const failureReasons = []; + for (const option of options) { + const scheme = authSchemes.get(option.schemeId); + if (!scheme) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` was not enabled for this service.`); + continue; + } + const identityProvider = scheme.identityProvider(await mwOptions.identityProviderConfigProvider(config)); + if (!identityProvider) { + failureReasons.push(`HttpAuthScheme \`${option.schemeId}\` did not have an IdentityProvider configured.`); + continue; + } + const { identityProperties = {}, signingProperties = {} } = ((_a = option.propertiesExtractor) == null ? void 0 : _a.call(option, config, context)) || {}; + option.identityProperties = Object.assign(option.identityProperties || {}, identityProperties); + option.signingProperties = Object.assign(option.signingProperties || {}, signingProperties); + smithyContext.selectedHttpAuthScheme = { + httpAuthOption: option, + identity: await identityProvider(option.identityProperties), + signer: scheme.signer + }; + break; + } + if (!smithyContext.selectedHttpAuthScheme) { + throw new Error(failureReasons.join("\n")); + } + return next(args); +}, "httpAuthSchemeMiddleware"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemeEndpointRuleSetPlugin.ts +var httpAuthSchemeEndpointRuleSetMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: "endpointV2Middleware" +}; +var getHttpAuthSchemeEndpointRuleSetPlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeEndpointRuleSetMiddlewareOptions + ); + } +}), "getHttpAuthSchemeEndpointRuleSetPlugin"); + +// src/middleware-http-auth-scheme/getHttpAuthSchemePlugin.ts +var import_middleware_serde = __nccwpck_require__(3255); +var httpAuthSchemeMiddlewareOptions = { + step: "serialize", + tags: ["HTTP_AUTH_SCHEME"], + name: "httpAuthSchemeMiddleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getHttpAuthSchemePlugin = /* @__PURE__ */ __name((config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider +}) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + httpAuthSchemeMiddleware(config, { + httpAuthSchemeParametersProvider, + identityProviderConfigProvider + }), + httpAuthSchemeMiddlewareOptions + ); + } +}), "getHttpAuthSchemePlugin"); + +// src/middleware-http-signing/httpSigningMiddleware.ts +var import_protocol_http = __nccwpck_require__(2356); + +var defaultErrorHandler = /* @__PURE__ */ __name((signingProperties) => (error) => { + throw error; +}, "defaultErrorHandler"); +var defaultSuccessHandler = /* @__PURE__ */ __name((httpResponse, signingProperties) => { +}, "defaultSuccessHandler"); +var httpSigningMiddleware = /* @__PURE__ */ __name((config) => (next, context) => async (args) => { + if (!import_protocol_http.HttpRequest.isInstance(args.request)) { + return 
next(args); + } + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const scheme = smithyContext.selectedHttpAuthScheme; + if (!scheme) { + throw new Error(`No HttpAuthScheme was selected: unable to sign request`); + } + const { + httpAuthOption: { signingProperties = {} }, + identity, + signer + } = scheme; + const output = await next({ + ...args, + request: await signer.sign(args.request, identity, signingProperties) + }).catch((signer.errorHandler || defaultErrorHandler)(signingProperties)); + (signer.successHandler || defaultSuccessHandler)(output.response, signingProperties); + return output; +}, "httpSigningMiddleware"); + +// src/middleware-http-signing/getHttpSigningMiddleware.ts +var httpSigningMiddlewareOptions = { + step: "finalizeRequest", + tags: ["HTTP_SIGNING"], + name: "httpSigningMiddleware", + aliases: ["apiKeyMiddleware", "tokenMiddleware", "awsAuthMiddleware"], + override: true, + relation: "after", + toMiddleware: "retryMiddleware" +}; +var getHttpSigningPlugin = /* @__PURE__ */ __name((config) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(httpSigningMiddleware(config), httpSigningMiddlewareOptions); + } +}), "getHttpSigningPlugin"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); + +// src/pagination/createPaginator.ts +var makePagedClientRequest = /* @__PURE__ */ __name(async (CommandCtor, client, input, ...args) => { + return await client.send(new CommandCtor(input), ...args); +}, "makePagedClientRequest"); +function createPaginator(ClientCtor, CommandCtor, inputTokenName, outputTokenName, pageSizeTokenName) { + return /* @__PURE__ */ __name(async function* paginateOperation(config, input, ...additionalArguments) { + let token = config.startingToken || void 0; + let hasNext = true; + let page; + while (hasNext) { + input[inputTokenName] = token; + if (pageSizeTokenName) { + input[pageSizeTokenName] = input[pageSizeTokenName] ?? 
config.pageSize; + } + if (config.client instanceof ClientCtor) { + page = await makePagedClientRequest(CommandCtor, config.client, input, ...additionalArguments); + } else { + throw new Error(`Invalid client, expected instance of ${ClientCtor.name}`); + } + yield page; + const prevToken = token; + token = get(page, outputTokenName); + hasNext = !!(token && (!config.stopOnSameToken || token !== prevToken)); + } + return void 0; + }, "paginateOperation"); +} +__name(createPaginator, "createPaginator"); +var get = /* @__PURE__ */ __name((fromObject, path) => { + let cursor = fromObject; + const pathComponents = path.split("."); + for (const step of pathComponents) { + if (!cursor || typeof cursor !== "object") { + return void 0; + } + cursor = cursor[step]; + } + return cursor; +}, "get"); + +// src/protocols/requestBuilder.ts +var import_protocols = __nccwpck_require__(3422); + +// src/setFeature.ts +function setFeature(context, feature, value) { + if (!context.__smithy_context) { + context.__smithy_context = { + features: {} + }; + } else if (!context.__smithy_context.features) { + context.__smithy_context.features = {}; + } + context.__smithy_context.features[feature] = value; +} +__name(setFeature, "setFeature"); + +// src/util-identity-and-auth/DefaultIdentityProviderConfig.ts +var _DefaultIdentityProviderConfig = class _DefaultIdentityProviderConfig { + /** + * Creates an IdentityProviderConfig with a record of scheme IDs to identity providers. + * + * @param config scheme IDs and identity providers to configure + */ + constructor(config) { + this.authSchemes = /* @__PURE__ */ new Map(); + for (const [key, value] of Object.entries(config)) { + if (value !== void 0) { + this.authSchemes.set(key, value); + } + } + } + getIdentityProvider(schemeId) { + return this.authSchemes.get(schemeId); + } +}; +__name(_DefaultIdentityProviderConfig, "DefaultIdentityProviderConfig"); +var DefaultIdentityProviderConfig = _DefaultIdentityProviderConfig; + +// src/util-identity-and-auth/httpAuthSchemes/httpApiKeyAuth.ts + + +var _HttpApiKeyAuthSigner = class _HttpApiKeyAuthSigner { + async sign(httpRequest, identity, signingProperties) { + if (!signingProperties) { + throw new Error( + "request could not be signed with `apiKey` since the `name` and `in` signer properties are missing" + ); + } + if (!signingProperties.name) { + throw new Error("request could not be signed with `apiKey` since the `name` signer property is missing"); + } + if (!signingProperties.in) { + throw new Error("request could not be signed with `apiKey` since the `in` signer property is missing"); + } + if (!identity.apiKey) { + throw new Error("request could not be signed with `apiKey` since the `apiKey` is not defined"); + } + const clonedRequest = import_protocol_http.HttpRequest.clone(httpRequest); + if (signingProperties.in === import_types.HttpApiKeyAuthLocation.QUERY) { + clonedRequest.query[signingProperties.name] = identity.apiKey; + } else if (signingProperties.in === import_types.HttpApiKeyAuthLocation.HEADER) { + clonedRequest.headers[signingProperties.name] = signingProperties.scheme ? 
`${signingProperties.scheme} ${identity.apiKey}` : identity.apiKey; + } else { + throw new Error( + "request can only be signed with `apiKey` locations `query` or `header`, but found: `" + signingProperties.in + "`" + ); + } + return clonedRequest; + } +}; +__name(_HttpApiKeyAuthSigner, "HttpApiKeyAuthSigner"); +var HttpApiKeyAuthSigner = _HttpApiKeyAuthSigner; + +// src/util-identity-and-auth/httpAuthSchemes/httpBearerAuth.ts + +var _HttpBearerAuthSigner = class _HttpBearerAuthSigner { + async sign(httpRequest, identity, signingProperties) { + const clonedRequest = import_protocol_http.HttpRequest.clone(httpRequest); + if (!identity.token) { + throw new Error("request could not be signed with `token` since the `token` is not defined"); + } + clonedRequest.headers["Authorization"] = `Bearer ${identity.token}`; + return clonedRequest; + } +}; +__name(_HttpBearerAuthSigner, "HttpBearerAuthSigner"); +var HttpBearerAuthSigner = _HttpBearerAuthSigner; + +// src/util-identity-and-auth/httpAuthSchemes/noAuth.ts +var _NoAuthSigner = class _NoAuthSigner { + async sign(httpRequest, identity, signingProperties) { + return httpRequest; + } +}; +__name(_NoAuthSigner, "NoAuthSigner"); +var NoAuthSigner = _NoAuthSigner; + +// src/util-identity-and-auth/memoizeIdentityProvider.ts +var createIsIdentityExpiredFunction = /* @__PURE__ */ __name((expirationMs) => (identity) => doesIdentityRequireRefresh(identity) && identity.expiration.getTime() - Date.now() < expirationMs, "createIsIdentityExpiredFunction"); +var EXPIRATION_MS = 3e5; +var isIdentityExpired = createIsIdentityExpiredFunction(EXPIRATION_MS); +var doesIdentityRequireRefresh = /* @__PURE__ */ __name((identity) => identity.expiration !== void 0, "doesIdentityRequireRefresh"); +var memoizeIdentityProvider = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + if (provider === void 0) { + return void 0; + } + const normalizedProvider = typeof provider !== "function" ? async () => Promise.resolve(provider) : provider; + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async (options) => { + if (!pending) { + pending = normalizedProvider(options); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || (options == null ? void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(options); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || (options == null ? 
void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(options); + } + if (isConstant) { + return resolved; + } + if (!requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(options); + return resolved; + } + return resolved; + }; +}, "memoizeIdentityProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 3422: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/submodules/protocols/index.ts +var protocols_exports = {}; +__export(protocols_exports, { + RequestBuilder: () => RequestBuilder, + collectBody: () => collectBody, + extendedEncodeURIComponent: () => extendedEncodeURIComponent, + requestBuilder: () => requestBuilder, + resolvedPath: () => resolvedPath +}); +module.exports = __toCommonJS(protocols_exports); + +// src/submodules/protocols/collect-stream-body.ts +var import_util_stream = __nccwpck_require__(4252); +var collectBody = async (streamBody = new Uint8Array(), context) => { + if (streamBody instanceof Uint8Array) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(streamBody); + } + if (!streamBody) { + return import_util_stream.Uint8ArrayBlobAdapter.mutate(new Uint8Array()); + } + const fromContext = context.streamCollector(streamBody); + return import_util_stream.Uint8ArrayBlobAdapter.mutate(await fromContext); +}; + +// src/submodules/protocols/extended-encode-uri-component.ts +function extendedEncodeURIComponent(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +// src/submodules/protocols/requestBuilder.ts +var import_protocol_http = __nccwpck_require__(2356); + +// src/submodules/protocols/resolve-path.ts +var resolvedPath = (resolvedPath2, input, memberName, labelValueProvider, uriLabel, isGreedyLabel) => { + if (input != null && input[memberName] !== void 0) { + const labelValue = labelValueProvider(); + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: " + memberName + "."); + } + resolvedPath2 = resolvedPath2.replace( + uriLabel, + isGreedyLabel ? 
labelValue.split("/").map((segment) => extendedEncodeURIComponent(segment)).join("/") : extendedEncodeURIComponent(labelValue) + ); + } else { + throw new Error("No value provided for input HTTP label: " + memberName + "."); + } + return resolvedPath2; +}; + +// src/submodules/protocols/requestBuilder.ts +function requestBuilder(input, context) { + return new RequestBuilder(input, context); +} +var RequestBuilder = class { + constructor(input, context) { + this.input = input; + this.context = context; + this.query = {}; + this.method = ""; + this.headers = {}; + this.path = ""; + this.body = null; + this.hostname = ""; + this.resolvePathStack = []; + } + async build() { + const { hostname, protocol = "https", port, path: basePath } = await this.context.endpoint(); + this.path = basePath; + for (const resolvePath of this.resolvePathStack) { + resolvePath(this.path); + } + return new import_protocol_http.HttpRequest({ + protocol, + hostname: this.hostname || hostname, + port, + method: this.method, + path: this.path, + query: this.query, + body: this.body, + headers: this.headers + }); + } + /** + * Brevity setter for "hostname". + */ + hn(hostname) { + this.hostname = hostname; + return this; + } + /** + * Brevity initial builder for "basepath". + */ + bp(uriLabel) { + this.resolvePathStack.push((basePath) => { + this.path = `${(basePath == null ? void 0 : basePath.endsWith("/")) ? basePath.slice(0, -1) : basePath || ""}` + uriLabel; + }); + return this; + } + /** + * Brevity incremental builder for "path". + */ + p(memberName, labelValueProvider, uriLabel, isGreedyLabel) { + this.resolvePathStack.push((path) => { + this.path = resolvedPath(path, this.input, memberName, labelValueProvider, uriLabel, isGreedyLabel); + }); + return this; + } + /** + * Brevity setter for "headers". + */ + h(headers) { + this.headers = headers; + return this; + } + /** + * Brevity setter for "query". + */ + q(query) { + this.query = query; + return this; + } + /** + * Brevity setter for "body". + */ + b(body) { + this.body = body; + return this; + } + /** + * Brevity setter for "method". 
+ */ + m(method) { + this.method = method; + return this; + } +}; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); + + +/***/ }), + +/***/ 566: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_MAX_RETRIES: () => DEFAULT_MAX_RETRIES, + DEFAULT_TIMEOUT: () => DEFAULT_TIMEOUT, + ENV_CMDS_AUTH_TOKEN: () => ENV_CMDS_AUTH_TOKEN, + ENV_CMDS_FULL_URI: () => ENV_CMDS_FULL_URI, + ENV_CMDS_RELATIVE_URI: () => ENV_CMDS_RELATIVE_URI, + Endpoint: () => Endpoint, + fromContainerMetadata: () => fromContainerMetadata, + fromInstanceMetadata: () => fromInstanceMetadata, + getInstanceMetadataEndpoint: () => getInstanceMetadataEndpoint, + httpRequest: () => httpRequest, + providerConfigFromInit: () => providerConfigFromInit +}); +module.exports = __toCommonJS(src_exports); + +// src/fromContainerMetadata.ts + +var import_url = __nccwpck_require__(7016); + +// src/remoteProvider/httpRequest.ts +var import_property_provider = __nccwpck_require__(1238); +var import_buffer = __nccwpck_require__(181); +var import_http = __nccwpck_require__(8611); +function httpRequest(options) { + return new Promise((resolve, reject) => { + var _a; + const req = (0, import_http.request)({ + method: "GET", + ...options, + // Node.js http module doesn't accept hostname with square brackets + // Refs: https://github.com/nodejs/node/issues/39738 + hostname: (_a = options.hostname) == null ? 
void 0 : _a.replace(/^\[(.+)\]$/, "$1") + }); + req.on("error", (err) => { + reject(Object.assign(new import_property_provider.ProviderError("Unable to connect to instance metadata service"), err)); + req.destroy(); + }); + req.on("timeout", () => { + reject(new import_property_provider.ProviderError("TimeoutError from instance metadata service")); + req.destroy(); + }); + req.on("response", (res) => { + const { statusCode = 400 } = res; + if (statusCode < 200 || 300 <= statusCode) { + reject( + Object.assign(new import_property_provider.ProviderError("Error response received from instance metadata service"), { statusCode }) + ); + req.destroy(); + } + const chunks = []; + res.on("data", (chunk) => { + chunks.push(chunk); + }); + res.on("end", () => { + resolve(import_buffer.Buffer.concat(chunks)); + req.destroy(); + }); + }); + req.end(); + }); +} +__name(httpRequest, "httpRequest"); + +// src/remoteProvider/ImdsCredentials.ts +var isImdsCredentials = /* @__PURE__ */ __name((arg) => Boolean(arg) && typeof arg === "object" && typeof arg.AccessKeyId === "string" && typeof arg.SecretAccessKey === "string" && typeof arg.Token === "string" && typeof arg.Expiration === "string", "isImdsCredentials"); +var fromImdsCredentials = /* @__PURE__ */ __name((creds) => ({ + accessKeyId: creds.AccessKeyId, + secretAccessKey: creds.SecretAccessKey, + sessionToken: creds.Token, + expiration: new Date(creds.Expiration), + ...creds.AccountId && { accountId: creds.AccountId } +}), "fromImdsCredentials"); + +// src/remoteProvider/RemoteProviderInit.ts +var DEFAULT_TIMEOUT = 1e3; +var DEFAULT_MAX_RETRIES = 0; +var providerConfigFromInit = /* @__PURE__ */ __name(({ + maxRetries = DEFAULT_MAX_RETRIES, + timeout = DEFAULT_TIMEOUT +}) => ({ maxRetries, timeout }), "providerConfigFromInit"); + +// src/remoteProvider/retry.ts +var retry = /* @__PURE__ */ __name((toRetry, maxRetries) => { + let promise = toRetry(); + for (let i = 0; i < maxRetries; i++) { + promise = promise.catch(toRetry); + } + return promise; +}, "retry"); + +// src/fromContainerMetadata.ts +var ENV_CMDS_FULL_URI = "AWS_CONTAINER_CREDENTIALS_FULL_URI"; +var ENV_CMDS_RELATIVE_URI = "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI"; +var ENV_CMDS_AUTH_TOKEN = "AWS_CONTAINER_AUTHORIZATION_TOKEN"; +var fromContainerMetadata = /* @__PURE__ */ __name((init = {}) => { + const { timeout, maxRetries } = providerConfigFromInit(init); + return () => retry(async () => { + const requestOptions = await getCmdsUri({ logger: init.logger }); + const credsResponse = JSON.parse(await requestFromEcsImds(timeout, requestOptions)); + if (!isImdsCredentials(credsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger + }); + } + return fromImdsCredentials(credsResponse); + }, maxRetries); +}, "fromContainerMetadata"); +var requestFromEcsImds = /* @__PURE__ */ __name(async (timeout, options) => { + if (process.env[ENV_CMDS_AUTH_TOKEN]) { + options.headers = { + ...options.headers, + Authorization: process.env[ENV_CMDS_AUTH_TOKEN] + }; + } + const buffer = await httpRequest({ + ...options, + timeout + }); + return buffer.toString(); +}, "requestFromEcsImds"); +var CMDS_IP = "169.254.170.2"; +var GREENGRASS_HOSTS = { + localhost: true, + "127.0.0.1": true +}; +var GREENGRASS_PROTOCOLS = { + "http:": true, + "https:": true +}; +var getCmdsUri = /* @__PURE__ */ __name(async ({ logger }) => { + if (process.env[ENV_CMDS_RELATIVE_URI]) { + return { + hostname: CMDS_IP, + path: 
process.env[ENV_CMDS_RELATIVE_URI] + }; + } + if (process.env[ENV_CMDS_FULL_URI]) { + const parsed = (0, import_url.parse)(process.env[ENV_CMDS_FULL_URI]); + if (!parsed.hostname || !(parsed.hostname in GREENGRASS_HOSTS)) { + throw new import_property_provider.CredentialsProviderError(`${parsed.hostname} is not a valid container metadata service hostname`, { + tryNextLink: false, + logger + }); + } + if (!parsed.protocol || !(parsed.protocol in GREENGRASS_PROTOCOLS)) { + throw new import_property_provider.CredentialsProviderError(`${parsed.protocol} is not a valid container metadata service protocol`, { + tryNextLink: false, + logger + }); + } + return { + ...parsed, + port: parsed.port ? parseInt(parsed.port, 10) : void 0 + }; + } + throw new import_property_provider.CredentialsProviderError( + `The container metadata credential provider cannot be used unless the ${ENV_CMDS_RELATIVE_URI} or ${ENV_CMDS_FULL_URI} environment variable is set`, + { + tryNextLink: false, + logger + } + ); +}, "getCmdsUri"); + +// src/fromInstanceMetadata.ts + + + +// src/error/InstanceMetadataV1FallbackError.ts + +var _InstanceMetadataV1FallbackError = class _InstanceMetadataV1FallbackError extends import_property_provider.CredentialsProviderError { + constructor(message, tryNextLink = true) { + super(message, tryNextLink); + this.tryNextLink = tryNextLink; + this.name = "InstanceMetadataV1FallbackError"; + Object.setPrototypeOf(this, _InstanceMetadataV1FallbackError.prototype); + } +}; +__name(_InstanceMetadataV1FallbackError, "InstanceMetadataV1FallbackError"); +var InstanceMetadataV1FallbackError = _InstanceMetadataV1FallbackError; + +// src/utils/getInstanceMetadataEndpoint.ts +var import_node_config_provider = __nccwpck_require__(5704); +var import_url_parser = __nccwpck_require__(4494); + +// src/config/Endpoint.ts +var Endpoint = /* @__PURE__ */ ((Endpoint2) => { + Endpoint2["IPv4"] = "http://169.254.169.254"; + Endpoint2["IPv6"] = "http://[fd00:ec2::254]"; + return Endpoint2; +})(Endpoint || {}); + +// src/config/EndpointConfigOptions.ts +var ENV_ENDPOINT_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT"; +var CONFIG_ENDPOINT_NAME = "ec2_metadata_service_endpoint"; +var ENDPOINT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_NAME], + default: void 0 +}; + +// src/config/EndpointMode.ts +var EndpointMode = /* @__PURE__ */ ((EndpointMode2) => { + EndpointMode2["IPv4"] = "IPv4"; + EndpointMode2["IPv6"] = "IPv6"; + return EndpointMode2; +})(EndpointMode || {}); + +// src/config/EndpointModeConfigOptions.ts +var ENV_ENDPOINT_MODE_NAME = "AWS_EC2_METADATA_SERVICE_ENDPOINT_MODE"; +var CONFIG_ENDPOINT_MODE_NAME = "ec2_metadata_service_endpoint_mode"; +var ENDPOINT_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_ENDPOINT_MODE_NAME], + configFileSelector: (profile) => profile[CONFIG_ENDPOINT_MODE_NAME], + default: "IPv4" /* IPv4 */ +}; + +// src/utils/getInstanceMetadataEndpoint.ts +var getInstanceMetadataEndpoint = /* @__PURE__ */ __name(async () => (0, import_url_parser.parseUrl)(await getFromEndpointConfig() || await getFromEndpointModeConfig()), "getInstanceMetadataEndpoint"); +var getFromEndpointConfig = /* @__PURE__ */ __name(async () => (0, import_node_config_provider.loadConfig)(ENDPOINT_CONFIG_OPTIONS)(), "getFromEndpointConfig"); +var getFromEndpointModeConfig = /* @__PURE__ */ __name(async () => { + const endpointMode = await (0, 
import_node_config_provider.loadConfig)(ENDPOINT_MODE_CONFIG_OPTIONS)(); + switch (endpointMode) { + case "IPv4" /* IPv4 */: + return "http://169.254.169.254" /* IPv4 */; + case "IPv6" /* IPv6 */: + return "http://[fd00:ec2::254]" /* IPv6 */; + default: + throw new Error(`Unsupported endpoint mode: ${endpointMode}. Select from ${Object.values(EndpointMode)}`); + } +}, "getFromEndpointModeConfig"); + +// src/utils/getExtendedInstanceMetadataCredentials.ts +var STATIC_STABILITY_REFRESH_INTERVAL_SECONDS = 5 * 60; +var STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS = 5 * 60; +var STATIC_STABILITY_DOC_URL = "https://docs.aws.amazon.com/sdkref/latest/guide/feature-static-credentials.html"; +var getExtendedInstanceMetadataCredentials = /* @__PURE__ */ __name((credentials, logger) => { + const refreshInterval = STATIC_STABILITY_REFRESH_INTERVAL_SECONDS + Math.floor(Math.random() * STATIC_STABILITY_REFRESH_INTERVAL_JITTER_WINDOW_SECONDS); + const newExpiration = new Date(Date.now() + refreshInterval * 1e3); + logger.warn( + `Attempting credential expiration extension due to a credential service availability issue. A refresh of these credentials will be attempted after ${new Date(newExpiration)}. +For more information, please visit: ` + STATIC_STABILITY_DOC_URL + ); + const originalExpiration = credentials.originalExpiration ?? credentials.expiration; + return { + ...credentials, + ...originalExpiration ? { originalExpiration } : {}, + expiration: newExpiration + }; +}, "getExtendedInstanceMetadataCredentials"); + +// src/utils/staticStabilityProvider.ts +var staticStabilityProvider = /* @__PURE__ */ __name((provider, options = {}) => { + const logger = (options == null ? void 0 : options.logger) || console; + let pastCredentials; + return async () => { + let credentials; + try { + credentials = await provider(); + if (credentials.expiration && credentials.expiration.getTime() < Date.now()) { + credentials = getExtendedInstanceMetadataCredentials(credentials, logger); + } + } catch (e) { + if (pastCredentials) { + logger.warn("Credential renew failed: ", e); + credentials = getExtendedInstanceMetadataCredentials(pastCredentials, logger); + } else { + throw e; + } + } + pastCredentials = credentials; + return credentials; + }; +}, "staticStabilityProvider"); + +// src/fromInstanceMetadata.ts +var IMDS_PATH = "/latest/meta-data/iam/security-credentials/"; +var IMDS_TOKEN_PATH = "/latest/api/token"; +var AWS_EC2_METADATA_V1_DISABLED = "AWS_EC2_METADATA_V1_DISABLED"; +var PROFILE_AWS_EC2_METADATA_V1_DISABLED = "ec2_metadata_v1_disabled"; +var X_AWS_EC2_METADATA_TOKEN = "x-aws-ec2-metadata-token"; +var fromInstanceMetadata = /* @__PURE__ */ __name((init = {}) => staticStabilityProvider(getInstanceMetadataProvider(init), { logger: init.logger }), "fromInstanceMetadata"); +var getInstanceMetadataProvider = /* @__PURE__ */ __name((init = {}) => { + let disableFetchToken = false; + const { logger, profile } = init; + const { timeout, maxRetries } = providerConfigFromInit(init); + const getCredentials = /* @__PURE__ */ __name(async (maxRetries2, options) => { + var _a; + const isImdsV1Fallback = disableFetchToken || ((_a = options.headers) == null ? 
void 0 : _a[X_AWS_EC2_METADATA_TOKEN]) == null; + if (isImdsV1Fallback) { + let fallbackBlockedFromProfile = false; + let fallbackBlockedFromProcessEnv = false; + const configValue = await (0, import_node_config_provider.loadConfig)( + { + environmentVariableSelector: (env) => { + const envValue = env[AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProcessEnv = !!envValue && envValue !== "false"; + if (envValue === void 0) { + throw new import_property_provider.CredentialsProviderError( + `${AWS_EC2_METADATA_V1_DISABLED} not set in env, checking config file next.`, + { logger: init.logger } + ); + } + return fallbackBlockedFromProcessEnv; + }, + configFileSelector: (profile2) => { + const profileValue = profile2[PROFILE_AWS_EC2_METADATA_V1_DISABLED]; + fallbackBlockedFromProfile = !!profileValue && profileValue !== "false"; + return fallbackBlockedFromProfile; + }, + default: false + }, + { + profile + } + )(); + if (init.ec2MetadataV1Disabled || configValue) { + const causes = []; + if (init.ec2MetadataV1Disabled) + causes.push("credential provider initialization (runtime option ec2MetadataV1Disabled)"); + if (fallbackBlockedFromProfile) + causes.push(`config file profile (${PROFILE_AWS_EC2_METADATA_V1_DISABLED})`); + if (fallbackBlockedFromProcessEnv) + causes.push(`process environment variable (${AWS_EC2_METADATA_V1_DISABLED})`); + throw new InstanceMetadataV1FallbackError( + `AWS EC2 Metadata v1 fallback has been blocked by AWS SDK configuration in the following: [${causes.join( + ", " + )}].` + ); + } + } + const imdsProfile = (await retry(async () => { + let profile2; + try { + profile2 = await getProfile(options); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return profile2; + }, maxRetries2)).trim(); + return retry(async () => { + let creds; + try { + creds = await getCredentialsFromProfile(imdsProfile, options, init); + } catch (err) { + if (err.statusCode === 401) { + disableFetchToken = false; + } + throw err; + } + return creds; + }, maxRetries2); + }, "getCredentials"); + return async () => { + const endpoint = await getInstanceMetadataEndpoint(); + if (disableFetchToken) { + logger == null ? void 0 : logger.debug("AWS SDK Instance Metadata", "using v1 fallback (no token fetch)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } else { + let token; + try { + token = (await getMetadataToken({ ...endpoint, timeout })).toString(); + } catch (error) { + if ((error == null ? void 0 : error.statusCode) === 400) { + throw Object.assign(error, { + message: "EC2 Metadata token request returned error" + }); + } else if (error.message === "TimeoutError" || [403, 404, 405].includes(error.statusCode)) { + disableFetchToken = true; + } + logger == null ? 
void 0 : logger.debug("AWS SDK Instance Metadata", "using v1 fallback (initial)"); + return getCredentials(maxRetries, { ...endpoint, timeout }); + } + return getCredentials(maxRetries, { + ...endpoint, + headers: { + [X_AWS_EC2_METADATA_TOKEN]: token + }, + timeout + }); + } + }; +}, "getInstanceMetadataProvider"); +var getMetadataToken = /* @__PURE__ */ __name(async (options) => httpRequest({ + ...options, + path: IMDS_TOKEN_PATH, + method: "PUT", + headers: { + "x-aws-ec2-metadata-token-ttl-seconds": "21600" + } +}), "getMetadataToken"); +var getProfile = /* @__PURE__ */ __name(async (options) => (await httpRequest({ ...options, path: IMDS_PATH })).toString(), "getProfile"); +var getCredentialsFromProfile = /* @__PURE__ */ __name(async (profile, options, init) => { + const credentialsResponse = JSON.parse( + (await httpRequest({ + ...options, + path: IMDS_PATH + profile + })).toString() + ); + if (!isImdsCredentials(credentialsResponse)) { + throw new import_property_provider.CredentialsProviderError("Invalid response received from instance metadata service.", { + logger: init.logger + }); + } + return fromImdsCredentials(credentialsResponse); +}, "getCredentialsFromProfile"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 7809: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + FetchHttpHandler: () => FetchHttpHandler, + keepAliveSupport: () => keepAliveSupport, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/fetch-http-handler.ts +var import_protocol_http = __nccwpck_require__(2356); +var import_querystring_builder = __nccwpck_require__(8256); + +// src/create-request.ts +function createRequest(url, requestOptions) { + return new Request(url, requestOptions); +} +__name(createRequest, "createRequest"); + +// src/request-timeout.ts +function requestTimeout(timeoutInMs = 0) { + return new Promise((resolve, reject) => { + if (timeoutInMs) { + setTimeout(() => { + const timeoutError = new Error(`Request did not complete within ${timeoutInMs} ms`); + timeoutError.name = "TimeoutError"; + reject(timeoutError); + }, timeoutInMs); + } + }); +} +__name(requestTimeout, "requestTimeout"); + +// src/fetch-http-handler.ts +var keepAliveSupport = { + supported: void 0 +}; +var _FetchHttpHandler = class _FetchHttpHandler { + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof (instanceOrOptions == null ? 
void 0 : instanceOrOptions.handle) === "function") { + return instanceOrOptions; + } + return new _FetchHttpHandler(instanceOrOptions); + } + constructor(options) { + if (typeof options === "function") { + this.configProvider = options().then((opts) => opts || {}); + } else { + this.config = options ?? {}; + this.configProvider = Promise.resolve(this.config); + } + if (keepAliveSupport.supported === void 0) { + keepAliveSupport.supported = Boolean( + typeof Request !== "undefined" && "keepalive" in createRequest("https://[::1]") + ); + } + } + destroy() { + } + async handle(request, { abortSignal } = {}) { + var _a; + if (!this.config) { + this.config = await this.configProvider; + } + const requestTimeoutInMs = this.config.requestTimeout; + const keepAlive = this.config.keepAlive === true; + const credentials = this.config.credentials; + if (abortSignal == null ? void 0 : abortSignal.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + return Promise.reject(abortError); + } + let path = request.path; + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const { port, method } = request; + const url = `${request.protocol}//${auth}${request.hostname}${port ? `:${port}` : ""}${path}`; + const body = method === "GET" || method === "HEAD" ? void 0 : request.body; + const requestOptions = { + body, + headers: new Headers(request.headers), + method, + credentials + }; + if ((_a = this.config) == null ? 
void 0 : _a.cache) { + requestOptions.cache = this.config.cache; + } + if (body) { + requestOptions.duplex = "half"; + } + if (typeof AbortController !== "undefined") { + requestOptions.signal = abortSignal; + } + if (keepAliveSupport.supported) { + requestOptions.keepalive = keepAlive; + } + if (typeof this.config.requestInit === "function") { + Object.assign(requestOptions, this.config.requestInit(request)); + } + let removeSignalEventListener = /* @__PURE__ */ __name(() => { + }, "removeSignalEventListener"); + const fetchRequest = createRequest(url, requestOptions); + const raceOfPromises = [ + fetch(fetchRequest).then((response) => { + const fetchHeaders = response.headers; + const transformedHeaders = {}; + for (const pair of fetchHeaders.entries()) { + transformedHeaders[pair[0]] = pair[1]; + } + const hasReadableStream = response.body != void 0; + if (!hasReadableStream) { + return response.blob().then((body2) => ({ + response: new import_protocol_http.HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: body2 + }) + })); + } + return { + response: new import_protocol_http.HttpResponse({ + headers: transformedHeaders, + reason: response.statusText, + statusCode: response.status, + body: response.body + }) + }; + }), + requestTimeout(requestTimeoutInMs) + ]; + if (abortSignal) { + raceOfPromises.push( + new Promise((resolve, reject) => { + const onAbort = /* @__PURE__ */ __name(() => { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + removeSignalEventListener = /* @__PURE__ */ __name(() => signal.removeEventListener("abort", onAbort), "removeSignalEventListener"); + } else { + abortSignal.onabort = onAbort; + } + }) + ); + } + return Promise.race(raceOfPromises).finally(removeSignalEventListener); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + config[key] = value; + return config; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; +__name(_FetchHttpHandler, "FetchHttpHandler"); +var FetchHttpHandler = _FetchHttpHandler; + +// src/stream-collector.ts +var streamCollector = /* @__PURE__ */ __name(async (stream) => { + var _a; + if (typeof Blob === "function" && stream instanceof Blob || ((_a = stream.constructor) == null ? 
void 0 : _a.name) === "Blob") { + return new Uint8Array(await stream.arrayBuffer()); + } + return collectStream(stream); +}, "streamCollector"); +async function collectStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +__name(collectStream, "collectStream"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 5092: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Hash: () => Hash +}); +module.exports = __toCommonJS(src_exports); +var import_util_buffer_from = __nccwpck_require__(4151); +var import_util_utf8 = __nccwpck_require__(1577); +var import_buffer = __nccwpck_require__(181); +var import_crypto = __nccwpck_require__(6982); +var _Hash = class _Hash { + constructor(algorithmIdentifier, secret) { + this.algorithmIdentifier = algorithmIdentifier; + this.secret = secret; + this.reset(); + } + update(toHash, encoding) { + this.hash.update((0, import_util_utf8.toUint8Array)(castSourceData(toHash, encoding))); + } + digest() { + return Promise.resolve(this.hash.digest()); + } + reset() { + this.hash = this.secret ? 
(0, import_crypto.createHmac)(this.algorithmIdentifier, castSourceData(this.secret)) : (0, import_crypto.createHash)(this.algorithmIdentifier); + } +}; +__name(_Hash, "Hash"); +var Hash = _Hash; +function castSourceData(toCast, encoding) { + if (import_buffer.Buffer.isBuffer(toCast)) { + return toCast; + } + if (typeof toCast === "string") { + return (0, import_util_buffer_from.fromString)(toCast, encoding); + } + if (ArrayBuffer.isView(toCast)) { + return (0, import_util_buffer_from.fromArrayBuffer)(toCast.buffer, toCast.byteOffset, toCast.byteLength); + } + return (0, import_util_buffer_from.fromArrayBuffer)(toCast); +} +__name(castSourceData, "castSourceData"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 6130: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isArrayBuffer: () => isArrayBuffer +}); +module.exports = __toCommonJS(src_exports); +var isArrayBuffer = /* @__PURE__ */ __name((arg) => typeof ArrayBuffer === "function" && arg instanceof ArrayBuffer || Object.prototype.toString.call(arg) === "[object ArrayBuffer]", "isArrayBuffer"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 7212: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + contentLengthMiddleware: () => contentLengthMiddleware, + contentLengthMiddlewareOptions: () => contentLengthMiddlewareOptions, + getContentLengthPlugin: () => getContentLengthPlugin +}); +module.exports = __toCommonJS(src_exports); +var import_protocol_http = __nccwpck_require__(2356); +var CONTENT_LENGTH_HEADER = "content-length"; +function contentLengthMiddleware(bodyLengthChecker) { + return 
(next) => async (args) => { + const request = args.request; + if (import_protocol_http.HttpRequest.isInstance(request)) { + const { body, headers } = request; + if (body && Object.keys(headers).map((str) => str.toLowerCase()).indexOf(CONTENT_LENGTH_HEADER) === -1) { + try { + const length = bodyLengthChecker(body); + request.headers = { + ...request.headers, + [CONTENT_LENGTH_HEADER]: String(length) + }; + } catch (error) { + } + } + } + return next({ + ...args, + request + }); + }; +} +__name(contentLengthMiddleware, "contentLengthMiddleware"); +var contentLengthMiddlewareOptions = { + step: "build", + tags: ["SET_CONTENT_LENGTH", "CONTENT_LENGTH"], + name: "contentLengthMiddleware", + override: true +}; +var getContentLengthPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.add(contentLengthMiddleware(options.bodyLengthChecker), contentLengthMiddlewareOptions); + } +}), "getContentLengthPlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 6041: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointFromConfig = void 0; +const node_config_provider_1 = __nccwpck_require__(5704); +const getEndpointUrlConfig_1 = __nccwpck_require__(8008); +const getEndpointFromConfig = async (serviceId) => (0, node_config_provider_1.loadConfig)((0, getEndpointUrlConfig_1.getEndpointUrlConfig)(serviceId !== null && serviceId !== void 0 ? serviceId : ""))(); +exports.getEndpointFromConfig = getEndpointFromConfig; + + +/***/ }), + +/***/ 8008: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getEndpointUrlConfig = void 0; +const shared_ini_file_loader_1 = __nccwpck_require__(4964); +const ENV_ENDPOINT_URL = "AWS_ENDPOINT_URL"; +const CONFIG_ENDPOINT_URL = "endpoint_url"; +const getEndpointUrlConfig = (serviceId) => ({ + environmentVariableSelector: (env) => { + const serviceSuffixParts = serviceId.split(" ").map((w) => w.toUpperCase()); + const serviceEndpointUrl = env[[ENV_ENDPOINT_URL, ...serviceSuffixParts].join("_")]; + if (serviceEndpointUrl) + return serviceEndpointUrl; + const endpointUrl = env[ENV_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + configFileSelector: (profile, config) => { + if (config && profile.services) { + const servicesSection = config[["services", profile.services].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (servicesSection) { + const servicePrefixParts = serviceId.split(" ").map((w) => w.toLowerCase()); + const endpointUrl = servicesSection[[servicePrefixParts.join("_"), CONFIG_ENDPOINT_URL].join(shared_ini_file_loader_1.CONFIG_PREFIX_SEPARATOR)]; + if (endpointUrl) + return endpointUrl; + } + } + const endpointUrl = profile[CONFIG_ENDPOINT_URL]; + if (endpointUrl) + return endpointUrl; + return undefined; + }, + default: undefined, +}); +exports.getEndpointUrlConfig = getEndpointUrlConfig; + + +/***/ }), + +/***/ 99: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = 
(target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + endpointMiddleware: () => endpointMiddleware, + endpointMiddlewareOptions: () => endpointMiddlewareOptions, + getEndpointFromInstructions: () => getEndpointFromInstructions, + getEndpointPlugin: () => getEndpointPlugin, + resolveEndpointConfig: () => resolveEndpointConfig, + resolveParams: () => resolveParams, + toEndpointV1: () => toEndpointV1 +}); +module.exports = __toCommonJS(src_exports); + +// src/service-customizations/s3.ts +var resolveParamsForS3 = /* @__PURE__ */ __name(async (endpointParams) => { + const bucket = (endpointParams == null ? void 0 : endpointParams.Bucket) || ""; + if (typeof endpointParams.Bucket === "string") { + endpointParams.Bucket = bucket.replace(/#/g, encodeURIComponent("#")).replace(/\?/g, encodeURIComponent("?")); + } + if (isArnBucketName(bucket)) { + if (endpointParams.ForcePathStyle === true) { + throw new Error("Path-style addressing cannot be used with ARN buckets"); + } + } else if (!isDnsCompatibleBucketName(bucket) || bucket.indexOf(".") !== -1 && !String(endpointParams.Endpoint).startsWith("http:") || bucket.toLowerCase() !== bucket || bucket.length < 3) { + endpointParams.ForcePathStyle = true; + } + if (endpointParams.DisableMultiRegionAccessPoints) { + endpointParams.disableMultiRegionAccessPoints = true; + endpointParams.DisableMRAP = true; + } + return endpointParams; +}, "resolveParamsForS3"); +var DOMAIN_PATTERN = /^[a-z0-9][a-z0-9\.\-]{1,61}[a-z0-9]$/; +var IP_ADDRESS_PATTERN = /(\d+\.){3}\d+/; +var DOTS_PATTERN = /\.\./; +var isDnsCompatibleBucketName = /* @__PURE__ */ __name((bucketName) => DOMAIN_PATTERN.test(bucketName) && !IP_ADDRESS_PATTERN.test(bucketName) && !DOTS_PATTERN.test(bucketName), "isDnsCompatibleBucketName"); +var isArnBucketName = /* @__PURE__ */ __name((bucketName) => { + const [arn, partition, service, , , bucket] = bucketName.split(":"); + const isArn = arn === "arn" && bucketName.split(":").length >= 6; + const isValidArn = Boolean(isArn && partition && service && bucket); + if (isArn && !isValidArn) { + throw new Error(`Invalid ARN: ${bucketName} was an invalid ARN.`); + } + return isValidArn; +}, "isArnBucketName"); + +// src/adaptors/createConfigValueProvider.ts +var createConfigValueProvider = /* @__PURE__ */ __name((configKey, canonicalEndpointParamKey, config) => { + const configProvider = /* @__PURE__ */ __name(async () => { + const configValue = config[configKey] ?? config[canonicalEndpointParamKey]; + if (typeof configValue === "function") { + return configValue(); + } + return configValue; + }, "configProvider"); + if (configKey === "credentialScope" || canonicalEndpointParamKey === "CredentialScope") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = (credentials == null ? void 0 : credentials.credentialScope) ?? (credentials == null ? 
void 0 : credentials.CredentialScope); + return configValue; + }; + } + if (configKey === "accountId" || canonicalEndpointParamKey === "AccountId") { + return async () => { + const credentials = typeof config.credentials === "function" ? await config.credentials() : config.credentials; + const configValue = (credentials == null ? void 0 : credentials.accountId) ?? (credentials == null ? void 0 : credentials.AccountId); + return configValue; + }; + } + if (configKey === "endpoint" || canonicalEndpointParamKey === "endpoint") { + return async () => { + const endpoint = await configProvider(); + if (endpoint && typeof endpoint === "object") { + if ("url" in endpoint) { + return endpoint.url.href; + } + if ("hostname" in endpoint) { + const { protocol, hostname, port, path } = endpoint; + return `${protocol}//${hostname}${port ? ":" + port : ""}${path}`; + } + } + return endpoint; + }; + } + return configProvider; +}, "createConfigValueProvider"); + +// src/adaptors/getEndpointFromInstructions.ts +var import_getEndpointFromConfig = __nccwpck_require__(6041); + +// src/adaptors/toEndpointV1.ts +var import_url_parser = __nccwpck_require__(4494); +var toEndpointV1 = /* @__PURE__ */ __name((endpoint) => { + if (typeof endpoint === "object") { + if ("url" in endpoint) { + return (0, import_url_parser.parseUrl)(endpoint.url); + } + return endpoint; + } + return (0, import_url_parser.parseUrl)(endpoint); +}, "toEndpointV1"); + +// src/adaptors/getEndpointFromInstructions.ts +var getEndpointFromInstructions = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig, context) => { + if (!clientConfig.endpoint) { + let endpointFromConfig; + if (clientConfig.serviceConfiguredEndpoint) { + endpointFromConfig = await clientConfig.serviceConfiguredEndpoint(); + } else { + endpointFromConfig = await (0, import_getEndpointFromConfig.getEndpointFromConfig)(clientConfig.serviceId); + } + if (endpointFromConfig) { + clientConfig.endpoint = () => Promise.resolve(toEndpointV1(endpointFromConfig)); + } + } + const endpointParams = await resolveParams(commandInput, instructionsSupplier, clientConfig); + if (typeof clientConfig.endpointProvider !== "function") { + throw new Error("config.endpointProvider is not set."); + } + const endpoint = clientConfig.endpointProvider(endpointParams, context); + return endpoint; +}, "getEndpointFromInstructions"); +var resolveParams = /* @__PURE__ */ __name(async (commandInput, instructionsSupplier, clientConfig) => { + var _a; + const endpointParams = {}; + const instructions = ((_a = instructionsSupplier == null ? void 0 : instructionsSupplier.getEndpointParameterInstructions) == null ? 
void 0 : _a.call(instructionsSupplier)) || {}; + for (const [name, instruction] of Object.entries(instructions)) { + switch (instruction.type) { + case "staticContextParams": + endpointParams[name] = instruction.value; + break; + case "contextParams": + endpointParams[name] = commandInput[instruction.name]; + break; + case "clientContextParams": + case "builtInParams": + endpointParams[name] = await createConfigValueProvider(instruction.name, name, clientConfig)(); + break; + case "operationContextParams": + endpointParams[name] = instruction.get(commandInput); + break; + default: + throw new Error("Unrecognized endpoint parameter instruction: " + JSON.stringify(instruction)); + } + } + if (Object.keys(instructions).length === 0) { + Object.assign(endpointParams, clientConfig); + } + if (String(clientConfig.serviceId).toLowerCase() === "s3") { + await resolveParamsForS3(endpointParams); + } + return endpointParams; +}, "resolveParams"); + +// src/endpointMiddleware.ts +var import_core = __nccwpck_require__(402); +var import_util_middleware = __nccwpck_require__(6324); +var endpointMiddleware = /* @__PURE__ */ __name(({ + config, + instructions +}) => { + return (next, context) => async (args) => { + var _a, _b, _c; + if (config.endpoint) { + (0, import_core.setFeature)(context, "ENDPOINT_OVERRIDE", "N"); + } + const endpoint = await getEndpointFromInstructions( + args.input, + { + getEndpointParameterInstructions() { + return instructions; + } + }, + { ...config }, + context + ); + context.endpointV2 = endpoint; + context.authSchemes = (_a = endpoint.properties) == null ? void 0 : _a.authSchemes; + const authScheme = (_b = context.authSchemes) == null ? void 0 : _b[0]; + if (authScheme) { + context["signing_region"] = authScheme.signingRegion; + context["signing_service"] = authScheme.signingName; + const smithyContext = (0, import_util_middleware.getSmithyContext)(context); + const httpAuthOption = (_c = smithyContext == null ? void 0 : smithyContext.selectedHttpAuthScheme) == null ? void 0 : _c.httpAuthOption; + if (httpAuthOption) { + httpAuthOption.signingProperties = Object.assign( + httpAuthOption.signingProperties || {}, + { + signing_region: authScheme.signingRegion, + signingRegion: authScheme.signingRegion, + signing_service: authScheme.signingName, + signingName: authScheme.signingName, + signingRegionSet: authScheme.signingRegionSet + }, + authScheme.properties + ); + } + } + return next({ + ...args + }); + }; +}, "endpointMiddleware"); + +// src/getEndpointPlugin.ts +var import_middleware_serde = __nccwpck_require__(3255); +var endpointMiddlewareOptions = { + step: "serialize", + tags: ["ENDPOINT_PARAMETERS", "ENDPOINT_V2", "ENDPOINT"], + name: "endpointV2Middleware", + override: true, + relation: "before", + toMiddleware: import_middleware_serde.serializerMiddlewareOption.name +}; +var getEndpointPlugin = /* @__PURE__ */ __name((config, instructions) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo( + endpointMiddleware({ + config, + instructions + }), + endpointMiddlewareOptions + ); + } +}), "getEndpointPlugin"); + +// src/resolveEndpointConfig.ts + +var import_getEndpointFromConfig2 = __nccwpck_require__(6041); +var resolveEndpointConfig = /* @__PURE__ */ __name((input) => { + const tls = input.tls ?? true; + const { endpoint } = input; + const customEndpointProvider = endpoint != null ? 
async () => toEndpointV1(await (0, import_util_middleware.normalizeProvider)(endpoint)()) : void 0; + const isCustomEndpoint = !!endpoint; + const resolvedConfig = { + ...input, + endpoint: customEndpointProvider, + tls, + isCustomEndpoint, + useDualstackEndpoint: (0, import_util_middleware.normalizeProvider)(input.useDualstackEndpoint ?? false), + useFipsEndpoint: (0, import_util_middleware.normalizeProvider)(input.useFipsEndpoint ?? false) + }; + let configuredEndpointPromise = void 0; + resolvedConfig.serviceConfiguredEndpoint = async () => { + if (input.serviceId && !configuredEndpointPromise) { + configuredEndpointPromise = (0, import_getEndpointFromConfig2.getEndpointFromConfig)(input.serviceId); + } + return configuredEndpointPromise; + }; + return resolvedConfig; +}, "resolveEndpointConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 9618: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + CONFIG_MAX_ATTEMPTS: () => CONFIG_MAX_ATTEMPTS, + CONFIG_RETRY_MODE: () => CONFIG_RETRY_MODE, + ENV_MAX_ATTEMPTS: () => ENV_MAX_ATTEMPTS, + ENV_RETRY_MODE: () => ENV_RETRY_MODE, + NODE_MAX_ATTEMPT_CONFIG_OPTIONS: () => NODE_MAX_ATTEMPT_CONFIG_OPTIONS, + NODE_RETRY_MODE_CONFIG_OPTIONS: () => NODE_RETRY_MODE_CONFIG_OPTIONS, + StandardRetryStrategy: () => StandardRetryStrategy, + defaultDelayDecider: () => defaultDelayDecider, + defaultRetryDecider: () => defaultRetryDecider, + getOmitRetryHeadersPlugin: () => getOmitRetryHeadersPlugin, + getRetryAfterHint: () => getRetryAfterHint, + getRetryPlugin: () => getRetryPlugin, + omitRetryHeadersMiddleware: () => omitRetryHeadersMiddleware, + omitRetryHeadersMiddlewareOptions: () => omitRetryHeadersMiddlewareOptions, + resolveRetryConfig: () => resolveRetryConfig, + retryMiddleware: () => retryMiddleware, + retryMiddlewareOptions: () => retryMiddlewareOptions +}); +module.exports = __toCommonJS(src_exports); + +// src/AdaptiveRetryStrategy.ts + + +// src/StandardRetryStrategy.ts +var import_protocol_http = __nccwpck_require__(2356); + + +var import_uuid = __nccwpck_require__(2048); + +// src/defaultRetryQuota.ts +var import_util_retry = __nccwpck_require__(5518); +var getDefaultRetryQuota = /* @__PURE__ */ __name((initialRetryTokens, options) => { + const MAX_CAPACITY = initialRetryTokens; + const noRetryIncrement = (options == null ? void 0 : options.noRetryIncrement) ?? import_util_retry.NO_RETRY_INCREMENT; + const retryCost = (options == null ? void 0 : options.retryCost) ?? 
import_util_retry.RETRY_COST; + const timeoutRetryCost = (options == null ? void 0 : options.timeoutRetryCost) ?? import_util_retry.TIMEOUT_RETRY_COST; + let availableCapacity = initialRetryTokens; + const getCapacityAmount = /* @__PURE__ */ __name((error) => error.name === "TimeoutError" ? timeoutRetryCost : retryCost, "getCapacityAmount"); + const hasRetryTokens = /* @__PURE__ */ __name((error) => getCapacityAmount(error) <= availableCapacity, "hasRetryTokens"); + const retrieveRetryTokens = /* @__PURE__ */ __name((error) => { + if (!hasRetryTokens(error)) { + throw new Error("No retry token available"); + } + const capacityAmount = getCapacityAmount(error); + availableCapacity -= capacityAmount; + return capacityAmount; + }, "retrieveRetryTokens"); + const releaseRetryTokens = /* @__PURE__ */ __name((capacityReleaseAmount) => { + availableCapacity += capacityReleaseAmount ?? noRetryIncrement; + availableCapacity = Math.min(availableCapacity, MAX_CAPACITY); + }, "releaseRetryTokens"); + return Object.freeze({ + hasRetryTokens, + retrieveRetryTokens, + releaseRetryTokens + }); +}, "getDefaultRetryQuota"); + +// src/delayDecider.ts + +var defaultDelayDecider = /* @__PURE__ */ __name((delayBase, attempts) => Math.floor(Math.min(import_util_retry.MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)), "defaultDelayDecider"); + +// src/retryDecider.ts +var import_service_error_classification = __nccwpck_require__(2058); +var defaultRetryDecider = /* @__PURE__ */ __name((error) => { + if (!error) { + return false; + } + return (0, import_service_error_classification.isRetryableByTrait)(error) || (0, import_service_error_classification.isClockSkewError)(error) || (0, import_service_error_classification.isThrottlingError)(error) || (0, import_service_error_classification.isTransientError)(error); +}, "defaultRetryDecider"); + +// src/util.ts +var asSdkError = /* @__PURE__ */ __name((error) => { + if (error instanceof Error) + return error; + if (error instanceof Object) + return Object.assign(new Error(), error); + if (typeof error === "string") + return new Error(error); + return new Error(`AWS SDK error wrapper for ${error}`); +}, "asSdkError"); + +// src/StandardRetryStrategy.ts +var _StandardRetryStrategy = class _StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = import_util_retry.RETRY_MODES.STANDARD; + this.retryDecider = (options == null ? void 0 : options.retryDecider) ?? defaultRetryDecider; + this.delayDecider = (options == null ? void 0 : options.delayDecider) ?? defaultDelayDecider; + this.retryQuota = (options == null ? void 0 : options.retryQuota) ?? 
getDefaultRetryQuota(import_util_retry.INITIAL_RETRY_TOKENS); + } + shouldRetry(error, attempts, maxAttempts) { + return attempts < maxAttempts && this.retryDecider(error) && this.retryQuota.hasRetryTokens(error); + } + async getMaxAttempts() { + let maxAttempts; + try { + maxAttempts = await this.maxAttemptsProvider(); + } catch (error) { + maxAttempts = import_util_retry.DEFAULT_MAX_ATTEMPTS; + } + return maxAttempts; + } + async retry(next, args, options) { + let retryTokenAmount; + let attempts = 0; + let totalDelay = 0; + const maxAttempts = await this.getMaxAttempts(); + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (import_protocol_http.HttpRequest.isInstance(request)) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + if (options == null ? void 0 : options.beforeRequest) { + await options.beforeRequest(); + } + const { response, output } = await next(args); + if (options == null ? void 0 : options.afterRequest) { + options.afterRequest(response); + } + this.retryQuota.releaseRetryTokens(retryTokenAmount); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalDelay; + return { response, output }; + } catch (e) { + const err = asSdkError(e); + attempts++; + if (this.shouldRetry(err, attempts, maxAttempts)) { + retryTokenAmount = this.retryQuota.retrieveRetryTokens(err); + const delayFromDecider = this.delayDecider( + (0, import_service_error_classification.isThrottlingError)(err) ? import_util_retry.THROTTLING_RETRY_DELAY_BASE : import_util_retry.DEFAULT_RETRY_DELAY_BASE, + attempts + ); + const delayFromResponse = getDelayFromRetryAfterHeader(err.$response); + const delay = Math.max(delayFromResponse || 0, delayFromDecider); + totalDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + continue; + } + if (!err.$metadata) { + err.$metadata = {}; + } + err.$metadata.attempts = attempts; + err.$metadata.totalRetryDelay = totalDelay; + throw err; + } + } + } +}; +__name(_StandardRetryStrategy, "StandardRetryStrategy"); +var StandardRetryStrategy = _StandardRetryStrategy; +var getDelayFromRetryAfterHeader = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return retryAfterSeconds * 1e3; + const retryAfterDate = new Date(retryAfter); + return retryAfterDate.getTime() - Date.now(); +}, "getDelayFromRetryAfterHeader"); + +// src/AdaptiveRetryStrategy.ts +var _AdaptiveRetryStrategy = class _AdaptiveRetryStrategy extends StandardRetryStrategy { + constructor(maxAttemptsProvider, options) { + const { rateLimiter, ...superOptions } = options ?? {}; + super(maxAttemptsProvider, superOptions); + this.rateLimiter = rateLimiter ?? 
new import_util_retry.DefaultRateLimiter(); + this.mode = import_util_retry.RETRY_MODES.ADAPTIVE; + } + async retry(next, args) { + return super.retry(next, args, { + beforeRequest: async () => { + return this.rateLimiter.getSendToken(); + }, + afterRequest: (response) => { + this.rateLimiter.updateClientSendingRate(response); + } + }); + } +}; +__name(_AdaptiveRetryStrategy, "AdaptiveRetryStrategy"); +var AdaptiveRetryStrategy = _AdaptiveRetryStrategy; + +// src/configurations.ts +var import_util_middleware = __nccwpck_require__(6324); + +var ENV_MAX_ATTEMPTS = "AWS_MAX_ATTEMPTS"; +var CONFIG_MAX_ATTEMPTS = "max_attempts"; +var NODE_MAX_ATTEMPT_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + const value = env[ENV_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Environment variable ${ENV_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + configFileSelector: (profile) => { + const value = profile[CONFIG_MAX_ATTEMPTS]; + if (!value) + return void 0; + const maxAttempt = parseInt(value); + if (Number.isNaN(maxAttempt)) { + throw new Error(`Shared config file entry ${CONFIG_MAX_ATTEMPTS} mast be a number, got "${value}"`); + } + return maxAttempt; + }, + default: import_util_retry.DEFAULT_MAX_ATTEMPTS +}; +var resolveRetryConfig = /* @__PURE__ */ __name((input) => { + const { retryStrategy } = input; + const maxAttempts = (0, import_util_middleware.normalizeProvider)(input.maxAttempts ?? import_util_retry.DEFAULT_MAX_ATTEMPTS); + return { + ...input, + maxAttempts, + retryStrategy: async () => { + if (retryStrategy) { + return retryStrategy; + } + const retryMode = await (0, import_util_middleware.normalizeProvider)(input.retryMode)(); + if (retryMode === import_util_retry.RETRY_MODES.ADAPTIVE) { + return new import_util_retry.AdaptiveRetryStrategy(maxAttempts); + } + return new import_util_retry.StandardRetryStrategy(maxAttempts); + } + }; +}, "resolveRetryConfig"); +var ENV_RETRY_MODE = "AWS_RETRY_MODE"; +var CONFIG_RETRY_MODE = "retry_mode"; +var NODE_RETRY_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => env[ENV_RETRY_MODE], + configFileSelector: (profile) => profile[CONFIG_RETRY_MODE], + default: import_util_retry.DEFAULT_RETRY_MODE +}; + +// src/omitRetryHeadersMiddleware.ts + + +var omitRetryHeadersMiddleware = /* @__PURE__ */ __name(() => (next) => async (args) => { + const { request } = args; + if (import_protocol_http.HttpRequest.isInstance(request)) { + delete request.headers[import_util_retry.INVOCATION_ID_HEADER]; + delete request.headers[import_util_retry.REQUEST_HEADER]; + } + return next(args); +}, "omitRetryHeadersMiddleware"); +var omitRetryHeadersMiddlewareOptions = { + name: "omitRetryHeadersMiddleware", + tags: ["RETRY", "HEADERS", "OMIT_RETRY_HEADERS"], + relation: "before", + toMiddleware: "awsAuthMiddleware", + override: true +}; +var getOmitRetryHeadersPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.addRelativeTo(omitRetryHeadersMiddleware(), omitRetryHeadersMiddlewareOptions); + } +}), "getOmitRetryHeadersPlugin"); + +// src/retryMiddleware.ts + + +var import_smithy_client = __nccwpck_require__(1411); + + +var import_isStreamingPayload = __nccwpck_require__(9831); +var retryMiddleware = /* @__PURE__ */ __name((options) => (next, context) => async (args) => { + var _a; + let retryStrategy = await options.retryStrategy(); + const maxAttempts = await options.maxAttempts(); + 
if (isRetryStrategyV2(retryStrategy)) { + retryStrategy = retryStrategy; + let retryToken = await retryStrategy.acquireInitialRetryToken(context["partition_id"]); + let lastError = new Error(); + let attempts = 0; + let totalRetryDelay = 0; + const { request } = args; + const isRequest = import_protocol_http.HttpRequest.isInstance(request); + if (isRequest) { + request.headers[import_util_retry.INVOCATION_ID_HEADER] = (0, import_uuid.v4)(); + } + while (true) { + try { + if (isRequest) { + request.headers[import_util_retry.REQUEST_HEADER] = `attempt=${attempts + 1}; max=${maxAttempts}`; + } + const { response, output } = await next(args); + retryStrategy.recordSuccess(retryToken); + output.$metadata.attempts = attempts + 1; + output.$metadata.totalRetryDelay = totalRetryDelay; + return { response, output }; + } catch (e) { + const retryErrorInfo = getRetryErrorInfo(e); + lastError = asSdkError(e); + if (isRequest && (0, import_isStreamingPayload.isStreamingPayload)(request)) { + (_a = context.logger instanceof import_smithy_client.NoOpLogger ? console : context.logger) == null ? void 0 : _a.warn( + "An error was encountered in a non-retryable streaming request." + ); + throw lastError; + } + try { + retryToken = await retryStrategy.refreshRetryTokenForRetry(retryToken, retryErrorInfo); + } catch (refreshError) { + if (!lastError.$metadata) { + lastError.$metadata = {}; + } + lastError.$metadata.attempts = attempts + 1; + lastError.$metadata.totalRetryDelay = totalRetryDelay; + throw lastError; + } + attempts = retryToken.getRetryCount(); + const delay = retryToken.getRetryDelay(); + totalRetryDelay += delay; + await new Promise((resolve) => setTimeout(resolve, delay)); + } + } + } else { + retryStrategy = retryStrategy; + if (retryStrategy == null ? 
void 0 : retryStrategy.mode) + context.userAgent = [...context.userAgent || [], ["cfg/retry-mode", retryStrategy.mode]]; + return retryStrategy.retry(next, args); + } +}, "retryMiddleware"); +var isRetryStrategyV2 = /* @__PURE__ */ __name((retryStrategy) => typeof retryStrategy.acquireInitialRetryToken !== "undefined" && typeof retryStrategy.refreshRetryTokenForRetry !== "undefined" && typeof retryStrategy.recordSuccess !== "undefined", "isRetryStrategyV2"); +var getRetryErrorInfo = /* @__PURE__ */ __name((error) => { + const errorInfo = { + error, + errorType: getRetryErrorType(error) + }; + const retryAfterHint = getRetryAfterHint(error.$response); + if (retryAfterHint) { + errorInfo.retryAfterHint = retryAfterHint; + } + return errorInfo; +}, "getRetryErrorInfo"); +var getRetryErrorType = /* @__PURE__ */ __name((error) => { + if ((0, import_service_error_classification.isThrottlingError)(error)) + return "THROTTLING"; + if ((0, import_service_error_classification.isTransientError)(error)) + return "TRANSIENT"; + if ((0, import_service_error_classification.isServerError)(error)) + return "SERVER_ERROR"; + return "CLIENT_ERROR"; +}, "getRetryErrorType"); +var retryMiddlewareOptions = { + name: "retryMiddleware", + tags: ["RETRY"], + step: "finalizeRequest", + priority: "high", + override: true +}; +var getRetryPlugin = /* @__PURE__ */ __name((options) => ({ + applyToStack: (clientStack) => { + clientStack.add(retryMiddleware(options), retryMiddlewareOptions); + } +}), "getRetryPlugin"); +var getRetryAfterHint = /* @__PURE__ */ __name((response) => { + if (!import_protocol_http.HttpResponse.isInstance(response)) + return; + const retryAfterHeaderName = Object.keys(response.headers).find((key) => key.toLowerCase() === "retry-after"); + if (!retryAfterHeaderName) + return; + const retryAfter = response.headers[retryAfterHeaderName]; + const retryAfterSeconds = Number(retryAfter); + if (!Number.isNaN(retryAfterSeconds)) + return new Date(retryAfterSeconds * 1e3); + const retryAfterDate = new Date(retryAfter); + return retryAfterDate; +}, "getRetryAfterHint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 9831: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isStreamingPayload = void 0; +const stream_1 = __nccwpck_require__(2203); +const isStreamingPayload = (request) => (request === null || request === void 0 ? void 0 : request.body) instanceof stream_1.Readable || + (typeof ReadableStream !== "undefined" && (request === null || request === void 0 ? 
void 0 : request.body) instanceof ReadableStream); +exports.isStreamingPayload = isStreamingPayload; + + +/***/ }), + +/***/ 3255: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + deserializerMiddleware: () => deserializerMiddleware, + deserializerMiddlewareOption: () => deserializerMiddlewareOption, + getSerdePlugin: () => getSerdePlugin, + serializerMiddleware: () => serializerMiddleware, + serializerMiddlewareOption: () => serializerMiddlewareOption +}); +module.exports = __toCommonJS(src_exports); + +// src/deserializerMiddleware.ts +var deserializerMiddleware = /* @__PURE__ */ __name((options, deserializer) => (next) => async (args) => { + const { response } = await next(args); + try { + const parsed = await deserializer(response, options); + return { + response, + output: parsed + }; + } catch (error) { + Object.defineProperty(error, "$response", { + value: response + }); + if (!("$metadata" in error)) { + const hint = `Deserialization error: to see the raw response, inspect the hidden field {error}.$response on this object.`; + error.message += "\n " + hint; + if (typeof error.$responseBodyText !== "undefined") { + if (error.$response) { + error.$response.body = error.$responseBodyText; + } + } + } + throw error; + } +}, "deserializerMiddleware"); + +// src/serializerMiddleware.ts +var serializerMiddleware = /* @__PURE__ */ __name((options, serializer) => (next, context) => async (args) => { + var _a; + const endpoint = ((_a = context.endpointV2) == null ? void 0 : _a.url) && options.urlParser ? 
async () => options.urlParser(context.endpointV2.url) : options.endpoint; + if (!endpoint) { + throw new Error("No valid endpoint provider available."); + } + const request = await serializer(args.input, { ...options, endpoint }); + return next({ + ...args, + request + }); +}, "serializerMiddleware"); + +// src/serdePlugin.ts +var deserializerMiddlewareOption = { + name: "deserializerMiddleware", + step: "deserialize", + tags: ["DESERIALIZER"], + override: true +}; +var serializerMiddlewareOption = { + name: "serializerMiddleware", + step: "serialize", + tags: ["SERIALIZER"], + override: true +}; +function getSerdePlugin(config, serializer, deserializer) { + return { + applyToStack: (commandStack) => { + commandStack.add(deserializerMiddleware(config, deserializer), deserializerMiddlewareOption); + commandStack.add(serializerMiddleware(config, serializer), serializerMiddlewareOption); + } + }; +} +__name(getSerdePlugin, "getSerdePlugin"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 9208: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + constructStack: () => constructStack +}); +module.exports = __toCommonJS(src_exports); + +// src/MiddlewareStack.ts +var getAllAliases = /* @__PURE__ */ __name((name, aliases) => { + const _aliases = []; + if (name) { + _aliases.push(name); + } + if (aliases) { + for (const alias of aliases) { + _aliases.push(alias); + } + } + return _aliases; +}, "getAllAliases"); +var getMiddlewareNameWithAliases = /* @__PURE__ */ __name((name, aliases) => { + return `${name || "anonymous"}${aliases && aliases.length > 0 ? ` (a.k.a. 
${aliases.join(",")})` : ""}`; +}, "getMiddlewareNameWithAliases"); +var constructStack = /* @__PURE__ */ __name(() => { + let absoluteEntries = []; + let relativeEntries = []; + let identifyOnResolve = false; + const entriesNameSet = /* @__PURE__ */ new Set(); + const sort = /* @__PURE__ */ __name((entries) => entries.sort( + (a, b) => stepWeights[b.step] - stepWeights[a.step] || priorityWeights[b.priority || "normal"] - priorityWeights[a.priority || "normal"] + ), "sort"); + const removeByName = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const aliases = getAllAliases(entry.name, entry.aliases); + if (aliases.includes(toRemove)) { + isRemoved = true; + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByName"); + const removeByReference = /* @__PURE__ */ __name((toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + if (entry.middleware === toRemove) { + isRemoved = true; + for (const alias of getAllAliases(entry.name, entry.aliases)) { + entriesNameSet.delete(alias); + } + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, "removeByReference"); + const cloneTo = /* @__PURE__ */ __name((toStack) => { + var _a; + absoluteEntries.forEach((entry) => { + toStack.add(entry.middleware, { ...entry }); + }); + relativeEntries.forEach((entry) => { + toStack.addRelativeTo(entry.middleware, { ...entry }); + }); + (_a = toStack.identifyOnResolve) == null ? 
void 0 : _a.call(toStack, stack.identifyOnResolve()); + return toStack; + }, "cloneTo"); + const expandRelativeMiddlewareList = /* @__PURE__ */ __name((from) => { + const expandedMiddlewareList = []; + from.before.forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + expandedMiddlewareList.push(from); + from.after.reverse().forEach((entry) => { + if (entry.before.length === 0 && entry.after.length === 0) { + expandedMiddlewareList.push(entry); + } else { + expandedMiddlewareList.push(...expandRelativeMiddlewareList(entry)); + } + }); + return expandedMiddlewareList; + }, "expandRelativeMiddlewareList"); + const getMiddlewareList = /* @__PURE__ */ __name((debug = false) => { + const normalizedAbsoluteEntries = []; + const normalizedRelativeEntries = []; + const normalizedEntriesNameMap = {}; + absoluteEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedAbsoluteEntries.push(normalizedEntry); + }); + relativeEntries.forEach((entry) => { + const normalizedEntry = { + ...entry, + before: [], + after: [] + }; + for (const alias of getAllAliases(normalizedEntry.name, normalizedEntry.aliases)) { + normalizedEntriesNameMap[alias] = normalizedEntry; + } + normalizedRelativeEntries.push(normalizedEntry); + }); + normalizedRelativeEntries.forEach((entry) => { + if (entry.toMiddleware) { + const toMiddleware = normalizedEntriesNameMap[entry.toMiddleware]; + if (toMiddleware === void 0) { + if (debug) { + return; + } + throw new Error( + `${entry.toMiddleware} is not found when adding ${getMiddlewareNameWithAliases(entry.name, entry.aliases)} middleware ${entry.relation} ${entry.toMiddleware}` + ); + } + if (entry.relation === "after") { + toMiddleware.after.push(entry); + } + if (entry.relation === "before") { + toMiddleware.before.push(entry); + } + } + }); + const mainChain = sort(normalizedAbsoluteEntries).map(expandRelativeMiddlewareList).reduce( + (wholeList, expandedMiddlewareList) => { + wholeList.push(...expandedMiddlewareList); + return wholeList; + }, + [] + ); + return mainChain; + }, "getMiddlewareList"); + const stack = { + add: (middleware, options = {}) => { + const { name, override, aliases: _aliases } = options; + const entry = { + step: "initialize", + priority: "normal", + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = absoluteEntries.findIndex( + (entry2) => { + var _a; + return entry2.name === alias || ((_a = entry2.aliases) == null ? 
void 0 : _a.some((a) => a === alias)); + } + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = absoluteEntries[toOverrideIndex]; + if (toOverride.step !== entry.step || entry.priority !== toOverride.priority) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware with ${toOverride.priority} priority in ${toOverride.step} step cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware with ${entry.priority} priority in ${entry.step} step.` + ); + } + absoluteEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + absoluteEntries.push(entry); + }, + addRelativeTo: (middleware, options) => { + const { name, override, aliases: _aliases } = options; + const entry = { + middleware, + ...options + }; + const aliases = getAllAliases(name, _aliases); + if (aliases.length > 0) { + if (aliases.some((alias) => entriesNameSet.has(alias))) { + if (!override) + throw new Error(`Duplicate middleware name '${getMiddlewareNameWithAliases(name, _aliases)}'`); + for (const alias of aliases) { + const toOverrideIndex = relativeEntries.findIndex( + (entry2) => { + var _a; + return entry2.name === alias || ((_a = entry2.aliases) == null ? void 0 : _a.some((a) => a === alias)); + } + ); + if (toOverrideIndex === -1) { + continue; + } + const toOverride = relativeEntries[toOverrideIndex]; + if (toOverride.toMiddleware !== entry.toMiddleware || toOverride.relation !== entry.relation) { + throw new Error( + `"${getMiddlewareNameWithAliases(toOverride.name, toOverride.aliases)}" middleware ${toOverride.relation} "${toOverride.toMiddleware}" middleware cannot be overridden by "${getMiddlewareNameWithAliases(name, _aliases)}" middleware ${entry.relation} "${entry.toMiddleware}" middleware.` + ); + } + relativeEntries.splice(toOverrideIndex, 1); + } + } + for (const alias of aliases) { + entriesNameSet.add(alias); + } + } + relativeEntries.push(entry); + }, + clone: () => cloneTo(constructStack()), + use: (plugin) => { + plugin.applyToStack(stack); + }, + remove: (toRemove) => { + if (typeof toRemove === "string") + return removeByName(toRemove); + else + return removeByReference(toRemove); + }, + removeByTag: (toRemove) => { + let isRemoved = false; + const filterCb = /* @__PURE__ */ __name((entry) => { + const { tags, name, aliases: _aliases } = entry; + if (tags && tags.includes(toRemove)) { + const aliases = getAllAliases(name, _aliases); + for (const alias of aliases) { + entriesNameSet.delete(alias); + } + isRemoved = true; + return false; + } + return true; + }, "filterCb"); + absoluteEntries = absoluteEntries.filter(filterCb); + relativeEntries = relativeEntries.filter(filterCb); + return isRemoved; + }, + concat: (from) => { + var _a; + const cloned = cloneTo(constructStack()); + cloned.use(from); + cloned.identifyOnResolve( + identifyOnResolve || cloned.identifyOnResolve() || (((_a = from.identifyOnResolve) == null ? void 0 : _a.call(from)) ?? false) + ); + return cloned; + }, + applyToStack: cloneTo, + identify: () => { + return getMiddlewareList(true).map((mw) => { + const step = mw.step ?? 
mw.relation + " " + mw.toMiddleware; + return getMiddlewareNameWithAliases(mw.name, mw.aliases) + " - " + step; + }); + }, + identifyOnResolve(toggle) { + if (typeof toggle === "boolean") + identifyOnResolve = toggle; + return identifyOnResolve; + }, + resolve: (handler, context) => { + for (const middleware of getMiddlewareList().map((entry) => entry.middleware).reverse()) { + handler = middleware(handler, context); + } + if (identifyOnResolve) { + console.log(stack.identify()); + } + return handler; + } + }; + return stack; +}, "constructStack"); +var stepWeights = { + initialize: 5, + serialize: 4, + build: 3, + finalizeRequest: 2, + deserialize: 1 +}; +var priorityWeights = { + high: 3, + normal: 2, + low: 1 +}; +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 5704: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + loadConfig: () => loadConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/configLoader.ts + + +// src/fromEnv.ts +var import_property_provider = __nccwpck_require__(1238); + +// src/getSelectorName.ts +function getSelectorName(functionString) { + try { + const constants = new Set(Array.from(functionString.match(/([A-Z_]){3,}/g) ?? [])); + constants.delete("CONFIG"); + constants.delete("CONFIG_PREFIX_SEPARATOR"); + constants.delete("ENV"); + return [...constants].join(", "); + } catch (e) { + return functionString; + } +} +__name(getSelectorName, "getSelectorName"); + +// src/fromEnv.ts +var fromEnv = /* @__PURE__ */ __name((envVarSelector, logger) => async () => { + try { + const config = envVarSelector(process.env); + if (config === void 0) { + throw new Error(); + } + return config; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Not found in ENV: ${getSelectorName(envVarSelector.toString())}`, + { logger } + ); + } +}, "fromEnv"); + +// src/fromSharedConfigFiles.ts + +var import_shared_ini_file_loader = __nccwpck_require__(4964); +var fromSharedConfigFiles = /* @__PURE__ */ __name((configSelector, { preferredFile = "config", ...init } = {}) => async () => { + const profile = (0, import_shared_ini_file_loader.getProfileName)(init); + const { configFile, credentialsFile } = await (0, import_shared_ini_file_loader.loadSharedConfigFiles)(init); + const profileFromCredentials = credentialsFile[profile] || {}; + const profileFromConfig = configFile[profile] || {}; + const mergedProfile = preferredFile === "config" ? 
{ ...profileFromCredentials, ...profileFromConfig } : { ...profileFromConfig, ...profileFromCredentials }; + try { + const cfgFile = preferredFile === "config" ? configFile : credentialsFile; + const configValue = configSelector(mergedProfile, cfgFile); + if (configValue === void 0) { + throw new Error(); + } + return configValue; + } catch (e) { + throw new import_property_provider.CredentialsProviderError( + e.message || `Not found in config files w/ profile [${profile}]: ${getSelectorName(configSelector.toString())}`, + { logger: init.logger } + ); + } +}, "fromSharedConfigFiles"); + +// src/fromStatic.ts + +var isFunction = /* @__PURE__ */ __name((func) => typeof func === "function", "isFunction"); +var fromStatic = /* @__PURE__ */ __name((defaultValue) => isFunction(defaultValue) ? async () => await defaultValue() : (0, import_property_provider.fromStatic)(defaultValue), "fromStatic"); + +// src/configLoader.ts +var loadConfig = /* @__PURE__ */ __name(({ environmentVariableSelector, configFileSelector, default: defaultValue }, configuration = {}) => (0, import_property_provider.memoize)( + (0, import_property_provider.chain)( + fromEnv(environmentVariableSelector), + fromSharedConfigFiles(configFileSelector, configuration), + fromStatic(defaultValue) + ) +), "loadConfig"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 1279: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + DEFAULT_REQUEST_TIMEOUT: () => DEFAULT_REQUEST_TIMEOUT, + NodeHttp2Handler: () => NodeHttp2Handler, + NodeHttpHandler: () => NodeHttpHandler, + streamCollector: () => streamCollector +}); +module.exports = __toCommonJS(src_exports); + +// src/node-http-handler.ts +var import_protocol_http = __nccwpck_require__(2356); +var import_querystring_builder = __nccwpck_require__(8256); +var import_http = __nccwpck_require__(8611); +var import_https = __nccwpck_require__(5692); + +// src/constants.ts +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "EPIPE", "ETIMEDOUT"]; + +// src/get-transformed-headers.ts +var getTransformedHeaders = /* @__PURE__ */ __name((headers) => { + const transformedHeaders = {}; + for (const name of Object.keys(headers)) { + const headerValues = headers[name]; + transformedHeaders[name] = Array.isArray(headerValues) ? headerValues.join(",") : headerValues; + } + return transformedHeaders; +}, "getTransformedHeaders"); + +// src/timing.ts +var timing = { + setTimeout: (cb, ms) => setTimeout(cb, ms), + clearTimeout: (timeoutId) => clearTimeout(timeoutId) +}; + +// src/set-connection-timeout.ts +var DEFER_EVENT_LISTENER_TIME = 1e3; +var setConnectionTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = 0) => { + if (!timeoutInMs) { + return -1; + } + const registerTimeout = /* @__PURE__ */ __name((offset) => { + const timeoutId = timing.setTimeout(() => { + request.destroy(); + reject( + Object.assign(new Error(`Socket timed out without establishing a connection within ${timeoutInMs} ms`), { + name: "TimeoutError" + }) + ); + }, timeoutInMs - offset); + const doWithSocket = /* @__PURE__ */ __name((socket) => { + if (socket == null ? 
void 0 : socket.connecting) { + socket.on("connect", () => { + timing.clearTimeout(timeoutId); + }); + } else { + timing.clearTimeout(timeoutId); + } + }, "doWithSocket"); + if (request.socket) { + doWithSocket(request.socket); + } else { + request.on("socket", doWithSocket); + } + }, "registerTimeout"); + if (timeoutInMs < 2e3) { + registerTimeout(0); + return 0; + } + return timing.setTimeout(registerTimeout.bind(null, DEFER_EVENT_LISTENER_TIME), DEFER_EVENT_LISTENER_TIME); +}, "setConnectionTimeout"); + +// src/set-socket-keep-alive.ts +var DEFER_EVENT_LISTENER_TIME2 = 3e3; +var setSocketKeepAlive = /* @__PURE__ */ __name((request, { keepAlive, keepAliveMsecs }, deferTimeMs = DEFER_EVENT_LISTENER_TIME2) => { + if (keepAlive !== true) { + return -1; + } + const registerListener = /* @__PURE__ */ __name(() => { + if (request.socket) { + request.socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + } else { + request.on("socket", (socket) => { + socket.setKeepAlive(keepAlive, keepAliveMsecs || 0); + }); + } + }, "registerListener"); + if (deferTimeMs === 0) { + registerListener(); + return 0; + } + return timing.setTimeout(registerListener, deferTimeMs); +}, "setSocketKeepAlive"); + +// src/set-socket-timeout.ts +var DEFER_EVENT_LISTENER_TIME3 = 3e3; +var setSocketTimeout = /* @__PURE__ */ __name((request, reject, timeoutInMs = 0) => { + const registerTimeout = /* @__PURE__ */ __name((offset) => { + const timeout = timeoutInMs - offset; + const onTimeout = /* @__PURE__ */ __name(() => { + request.destroy(); + reject(Object.assign(new Error(`Connection timed out after ${timeoutInMs} ms`), { name: "TimeoutError" })); + }, "onTimeout"); + if (request.socket) { + request.socket.setTimeout(timeout, onTimeout); + } else { + request.setTimeout(timeout, onTimeout); + } + }, "registerTimeout"); + if (0 < timeoutInMs && timeoutInMs < 6e3) { + registerTimeout(0); + return 0; + } + return timing.setTimeout( + registerTimeout.bind(null, timeoutInMs === 0 ? 0 : DEFER_EVENT_LISTENER_TIME3), + DEFER_EVENT_LISTENER_TIME3 + ); +}, "setSocketTimeout"); + +// src/write-request-body.ts +var import_stream = __nccwpck_require__(2203); +var MIN_WAIT_TIME = 1e3; +async function writeRequestBody(httpRequest, request, maxContinueTimeoutMs = MIN_WAIT_TIME) { + const headers = request.headers ?? 
{}; + const expect = headers["Expect"] || headers["expect"]; + let timeoutId = -1; + let sendBody = true; + if (expect === "100-continue") { + sendBody = await Promise.race([ + new Promise((resolve) => { + timeoutId = Number(timing.setTimeout(resolve, Math.max(MIN_WAIT_TIME, maxContinueTimeoutMs))); + }), + new Promise((resolve) => { + httpRequest.on("continue", () => { + timing.clearTimeout(timeoutId); + resolve(true); + }); + httpRequest.on("response", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + httpRequest.on("error", () => { + timing.clearTimeout(timeoutId); + resolve(false); + }); + }) + ]); + } + if (sendBody) { + writeBody(httpRequest, request.body); + } +} +__name(writeRequestBody, "writeRequestBody"); +function writeBody(httpRequest, body) { + if (body instanceof import_stream.Readable) { + body.pipe(httpRequest); + return; + } + if (body) { + if (Buffer.isBuffer(body) || typeof body === "string") { + httpRequest.end(body); + return; + } + const uint8 = body; + if (typeof uint8 === "object" && uint8.buffer && typeof uint8.byteOffset === "number" && typeof uint8.byteLength === "number") { + httpRequest.end(Buffer.from(uint8.buffer, uint8.byteOffset, uint8.byteLength)); + return; + } + httpRequest.end(Buffer.from(body)); + return; + } + httpRequest.end(); +} +__name(writeBody, "writeBody"); + +// src/node-http-handler.ts +var DEFAULT_REQUEST_TIMEOUT = 0; +var _NodeHttpHandler = class _NodeHttpHandler { + constructor(options) { + this.socketWarningTimestamp = 0; + // Node http handler is hard-coded to http/1.1: https://github.com/nodejs/node/blob/ff5664b83b89c55e4ab5d5f60068fb457f1f5872/lib/_http_server.js#L286 + this.metadata = { handlerProtocol: "http/1.1" }; + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options().then((_options) => { + resolve(this.resolveDefaultConfig(_options)); + }).catch(reject); + } else { + resolve(this.resolveDefaultConfig(options)); + } + }); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof (instanceOrOptions == null ? void 0 : instanceOrOptions.handle) === "function") { + return instanceOrOptions; + } + return new _NodeHttpHandler(instanceOrOptions); + } + /** + * @internal + * + * @param agent - http(s) agent in use by the NodeHttpHandler instance. + * @param socketWarningTimestamp - last socket usage check timestamp. + * @param logger - channel for the warning. + * @returns timestamp of last emitted warning. + */ + static checkSocketUsage(agent, socketWarningTimestamp, logger = console) { + var _a, _b, _c; + const { sockets, requests, maxSockets } = agent; + if (typeof maxSockets !== "number" || maxSockets === Infinity) { + return socketWarningTimestamp; + } + const interval = 15e3; + if (Date.now() - interval < socketWarningTimestamp) { + return socketWarningTimestamp; + } + if (sockets && requests) { + for (const origin in sockets) { + const socketsInUse = ((_a = sockets[origin]) == null ? void 0 : _a.length) ?? 0; + const requestsEnqueued = ((_b = requests[origin]) == null ? void 0 : _b.length) ?? 0; + if (socketsInUse >= maxSockets && requestsEnqueued >= 2 * maxSockets) { + (_c = logger == null ? void 0 : logger.warn) == null ? void 0 : _c.call( + logger, + `@smithy/node-http-handler:WARN - socket usage at capacity=${socketsInUse} and ${requestsEnqueued} additional requests are enqueued. 
+See https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-configuring-maxsockets.html +or increase socketAcquisitionWarningTimeout=(millis) in the NodeHttpHandler config.` + ); + return Date.now(); + } + } + } + return socketWarningTimestamp; + } + resolveDefaultConfig(options) { + const { requestTimeout, connectionTimeout, socketTimeout, httpAgent, httpsAgent } = options || {}; + const keepAlive = true; + const maxSockets = 50; + return { + connectionTimeout, + requestTimeout: requestTimeout ?? socketTimeout, + httpAgent: (() => { + if (httpAgent instanceof import_http.Agent || typeof (httpAgent == null ? void 0 : httpAgent.destroy) === "function") { + return httpAgent; + } + return new import_http.Agent({ keepAlive, maxSockets, ...httpAgent }); + })(), + httpsAgent: (() => { + if (httpsAgent instanceof import_https.Agent || typeof (httpsAgent == null ? void 0 : httpsAgent.destroy) === "function") { + return httpsAgent; + } + return new import_https.Agent({ keepAlive, maxSockets, ...httpsAgent }); + })(), + logger: console + }; + } + destroy() { + var _a, _b, _c, _d; + (_b = (_a = this.config) == null ? void 0 : _a.httpAgent) == null ? void 0 : _b.destroy(); + (_d = (_c = this.config) == null ? void 0 : _c.httpsAgent) == null ? void 0 : _d.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + } + return new Promise((_resolve, _reject) => { + let writeRequestBodyPromise = void 0; + const timeouts = []; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + timeouts.forEach(timing.clearTimeout); + _reject(arg); + }, "reject"); + if (!this.config) { + throw new Error("Node HTTP request handler config is not resolved"); + } + if (abortSignal == null ? void 0 : abortSignal.aborted) { + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const isSSL = request.protocol === "https:"; + const agent = isSSL ? this.config.httpsAgent : this.config.httpAgent; + timeouts.push( + timing.setTimeout( + () => { + this.socketWarningTimestamp = _NodeHttpHandler.checkSocketUsage( + agent, + this.socketWarningTimestamp, + this.config.logger + ); + }, + this.config.socketAcquisitionWarningTimeout ?? (this.config.requestTimeout ?? 2e3) + (this.config.connectionTimeout ?? 1e3) + ) + ); + const queryString = (0, import_querystring_builder.buildQueryString)(request.query || {}); + let auth = void 0; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}`; + } + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + let hostname = request.hostname ?? ""; + if (hostname[0] === "[" && hostname.endsWith("]")) { + hostname = request.hostname.slice(1, -1); + } else { + hostname = request.hostname; + } + const nodeHttpsOptions = { + headers: request.headers, + host: hostname, + method: request.method, + path, + port: request.port, + agent, + auth + }; + const requestFunc = isSSL ? 
import_https.request : import_http.request; + const req = requestFunc(nodeHttpsOptions, (res) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: res.statusCode || -1, + reason: res.statusMessage, + headers: getTransformedHeaders(res.headers), + body: res + }); + resolve({ response: httpResponse }); + }); + req.on("error", (err) => { + if (NODEJS_TIMEOUT_ERROR_CODES.includes(err.code)) { + reject(Object.assign(err, { name: "TimeoutError" })); + } else { + reject(err); + } + }); + if (abortSignal) { + const onAbort = /* @__PURE__ */ __name(() => { + req.destroy(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } else { + abortSignal.onabort = onAbort; + } + } + timeouts.push(setConnectionTimeout(req, reject, this.config.connectionTimeout)); + timeouts.push(setSocketTimeout(req, reject, this.config.requestTimeout)); + const httpAgent = nodeHttpsOptions.agent; + if (typeof httpAgent === "object" && "keepAlive" in httpAgent) { + timeouts.push( + setSocketKeepAlive(req, { + // @ts-expect-error keepAlive is not public on httpAgent. + keepAlive: httpAgent.keepAlive, + // @ts-expect-error keepAliveMsecs is not public on httpAgent. + keepAliveMsecs: httpAgent.keepAliveMsecs + }) + ); + } + writeRequestBodyPromise = writeRequestBody(req, request, this.config.requestTimeout).catch((e) => { + timeouts.forEach(timing.clearTimeout); + return _reject(e); + }); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } +}; +__name(_NodeHttpHandler, "NodeHttpHandler"); +var NodeHttpHandler = _NodeHttpHandler; + +// src/node-http2-handler.ts + + +var import_http22 = __nccwpck_require__(5675); + +// src/node-http2-connection-manager.ts +var import_http2 = __toESM(__nccwpck_require__(5675)); + +// src/node-http2-connection-pool.ts +var _NodeHttp2ConnectionPool = class _NodeHttp2ConnectionPool { + constructor(sessions) { + this.sessions = []; + this.sessions = sessions ?? 
[]; + } + poll() { + if (this.sessions.length > 0) { + return this.sessions.shift(); + } + } + offerLast(session) { + this.sessions.push(session); + } + contains(session) { + return this.sessions.includes(session); + } + remove(session) { + this.sessions = this.sessions.filter((s) => s !== session); + } + [Symbol.iterator]() { + return this.sessions[Symbol.iterator](); + } + destroy(connection) { + for (const session of this.sessions) { + if (session === connection) { + if (!session.destroyed) { + session.destroy(); + } + } + } + } +}; +__name(_NodeHttp2ConnectionPool, "NodeHttp2ConnectionPool"); +var NodeHttp2ConnectionPool = _NodeHttp2ConnectionPool; + +// src/node-http2-connection-manager.ts +var _NodeHttp2ConnectionManager = class _NodeHttp2ConnectionManager { + constructor(config) { + this.sessionCache = /* @__PURE__ */ new Map(); + this.config = config; + if (this.config.maxConcurrency && this.config.maxConcurrency <= 0) { + throw new RangeError("maxConcurrency must be greater than zero."); + } + } + lease(requestContext, connectionConfiguration) { + const url = this.getUrlString(requestContext); + const existingPool = this.sessionCache.get(url); + if (existingPool) { + const existingSession = existingPool.poll(); + if (existingSession && !this.config.disableConcurrency) { + return existingSession; + } + } + const session = import_http2.default.connect(url); + if (this.config.maxConcurrency) { + session.settings({ maxConcurrentStreams: this.config.maxConcurrency }, (err) => { + if (err) { + throw new Error( + "Fail to set maxConcurrentStreams to " + this.config.maxConcurrency + "when creating new session for " + requestContext.destination.toString() + ); + } + }); + } + session.unref(); + const destroySessionCb = /* @__PURE__ */ __name(() => { + session.destroy(); + this.deleteSession(url, session); + }, "destroySessionCb"); + session.on("goaway", destroySessionCb); + session.on("error", destroySessionCb); + session.on("frameError", destroySessionCb); + session.on("close", () => this.deleteSession(url, session)); + if (connectionConfiguration.requestTimeout) { + session.setTimeout(connectionConfiguration.requestTimeout, destroySessionCb); + } + const connectionPool = this.sessionCache.get(url) || new NodeHttp2ConnectionPool(); + connectionPool.offerLast(session); + this.sessionCache.set(url, connectionPool); + return session; + } + /** + * Delete a session from the connection pool. + * @param authority The authority of the session to delete. + * @param session The session to delete. + */ + deleteSession(authority, session) { + const existingConnectionPool = this.sessionCache.get(authority); + if (!existingConnectionPool) { + return; + } + if (!existingConnectionPool.contains(session)) { + return; + } + existingConnectionPool.remove(session); + this.sessionCache.set(authority, existingConnectionPool); + } + release(requestContext, session) { + var _a; + const cacheKey = this.getUrlString(requestContext); + (_a = this.sessionCache.get(cacheKey)) == null ? 
void 0 : _a.offerLast(session); + } + destroy() { + for (const [key, connectionPool] of this.sessionCache) { + for (const session of connectionPool) { + if (!session.destroyed) { + session.destroy(); + } + connectionPool.remove(session); + } + this.sessionCache.delete(key); + } + } + setMaxConcurrentStreams(maxConcurrentStreams) { + if (maxConcurrentStreams && maxConcurrentStreams <= 0) { + throw new RangeError("maxConcurrentStreams must be greater than zero."); + } + this.config.maxConcurrency = maxConcurrentStreams; + } + setDisableConcurrentStreams(disableConcurrentStreams) { + this.config.disableConcurrency = disableConcurrentStreams; + } + getUrlString(request) { + return request.destination.toString(); + } +}; +__name(_NodeHttp2ConnectionManager, "NodeHttp2ConnectionManager"); +var NodeHttp2ConnectionManager = _NodeHttp2ConnectionManager; + +// src/node-http2-handler.ts +var _NodeHttp2Handler = class _NodeHttp2Handler { + constructor(options) { + this.metadata = { handlerProtocol: "h2" }; + this.connectionManager = new NodeHttp2ConnectionManager({}); + this.configProvider = new Promise((resolve, reject) => { + if (typeof options === "function") { + options().then((opts) => { + resolve(opts || {}); + }).catch(reject); + } else { + resolve(options || {}); + } + }); + } + /** + * @returns the input if it is an HttpHandler of any class, + * or instantiates a new instance of this handler. + */ + static create(instanceOrOptions) { + if (typeof (instanceOrOptions == null ? void 0 : instanceOrOptions.handle) === "function") { + return instanceOrOptions; + } + return new _NodeHttp2Handler(instanceOrOptions); + } + destroy() { + this.connectionManager.destroy(); + } + async handle(request, { abortSignal } = {}) { + if (!this.config) { + this.config = await this.configProvider; + this.connectionManager.setDisableConcurrentStreams(this.config.disableConcurrentStreams || false); + if (this.config.maxConcurrentStreams) { + this.connectionManager.setMaxConcurrentStreams(this.config.maxConcurrentStreams); + } + } + const { requestTimeout, disableConcurrentStreams } = this.config; + return new Promise((_resolve, _reject) => { + var _a; + let fulfilled = false; + let writeRequestBodyPromise = void 0; + const resolve = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _resolve(arg); + }, "resolve"); + const reject = /* @__PURE__ */ __name(async (arg) => { + await writeRequestBodyPromise; + _reject(arg); + }, "reject"); + if (abortSignal == null ? void 0 : abortSignal.aborted) { + fulfilled = true; + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + reject(abortError); + return; + } + const { hostname, method, port, protocol, query } = request; + let auth = ""; + if (request.username != null || request.password != null) { + const username = request.username ?? ""; + const password = request.password ?? ""; + auth = `${username}:${password}@`; + } + const authority = `${protocol}//${auth}${hostname}${port ? `:${port}` : ""}`; + const requestContext = { destination: new URL(authority) }; + const session = this.connectionManager.lease(requestContext, { + requestTimeout: (_a = this.config) == null ? 
void 0 : _a.sessionTimeout, + disableConcurrentStreams: disableConcurrentStreams || false + }); + const rejectWithDestroy = /* @__PURE__ */ __name((err) => { + if (disableConcurrentStreams) { + this.destroySession(session); + } + fulfilled = true; + reject(err); + }, "rejectWithDestroy"); + const queryString = (0, import_querystring_builder.buildQueryString)(query || {}); + let path = request.path; + if (queryString) { + path += `?${queryString}`; + } + if (request.fragment) { + path += `#${request.fragment}`; + } + const req = session.request({ + ...request.headers, + [import_http22.constants.HTTP2_HEADER_PATH]: path, + [import_http22.constants.HTTP2_HEADER_METHOD]: method + }); + session.ref(); + req.on("response", (headers) => { + const httpResponse = new import_protocol_http.HttpResponse({ + statusCode: headers[":status"] || -1, + headers: getTransformedHeaders(headers), + body: req + }); + fulfilled = true; + resolve({ response: httpResponse }); + if (disableConcurrentStreams) { + session.close(); + this.connectionManager.deleteSession(authority, session); + } + }); + if (requestTimeout) { + req.setTimeout(requestTimeout, () => { + req.close(); + const timeoutError = new Error(`Stream timed out because of no activity for ${requestTimeout} ms`); + timeoutError.name = "TimeoutError"; + rejectWithDestroy(timeoutError); + }); + } + if (abortSignal) { + const onAbort = /* @__PURE__ */ __name(() => { + req.close(); + const abortError = new Error("Request aborted"); + abortError.name = "AbortError"; + rejectWithDestroy(abortError); + }, "onAbort"); + if (typeof abortSignal.addEventListener === "function") { + const signal = abortSignal; + signal.addEventListener("abort", onAbort, { once: true }); + req.once("close", () => signal.removeEventListener("abort", onAbort)); + } else { + abortSignal.onabort = onAbort; + } + } + req.on("frameError", (type, code, id) => { + rejectWithDestroy(new Error(`Frame type id ${type} in stream id ${id} has failed with code ${code}.`)); + }); + req.on("error", rejectWithDestroy); + req.on("aborted", () => { + rejectWithDestroy( + new Error(`HTTP/2 stream is abnormally aborted in mid-communication with result code ${req.rstCode}.`) + ); + }); + req.on("close", () => { + session.unref(); + if (disableConcurrentStreams) { + session.destroy(); + } + if (!fulfilled) { + rejectWithDestroy(new Error("Unexpected error: http2 request did not get a response")); + } + }); + writeRequestBodyPromise = writeRequestBody(req, request, requestTimeout); + }); + } + updateHttpClientConfig(key, value) { + this.config = void 0; + this.configProvider = this.configProvider.then((config) => { + return { + ...config, + [key]: value + }; + }); + } + httpHandlerConfigs() { + return this.config ?? {}; + } + /** + * Destroys a session. + * @param session The session to destroy. 
+ */ + destroySession(session) { + if (!session.destroyed) { + session.destroy(); + } + } +}; +__name(_NodeHttp2Handler, "NodeHttp2Handler"); +var NodeHttp2Handler = _NodeHttp2Handler; + +// src/stream-collector/collector.ts + +var _Collector = class _Collector extends import_stream.Writable { + constructor() { + super(...arguments); + this.bufferedBytes = []; + } + _write(chunk, encoding, callback) { + this.bufferedBytes.push(chunk); + callback(); + } +}; +__name(_Collector, "Collector"); +var Collector = _Collector; + +// src/stream-collector/index.ts +var streamCollector = /* @__PURE__ */ __name((stream) => { + if (isReadableStreamInstance(stream)) { + return collectReadableStream(stream); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function() { + const bytes = new Uint8Array(Buffer.concat(this.bufferedBytes)); + resolve(bytes); + }); + }); +}, "streamCollector"); +var isReadableStreamInstance = /* @__PURE__ */ __name((stream) => typeof ReadableStream === "function" && stream instanceof ReadableStream, "isReadableStreamInstance"); +async function collectReadableStream(stream) { + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + let length = 0; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + length += value.length; + } + isDone = done; + } + const collected = new Uint8Array(length); + let offset = 0; + for (const chunk of chunks) { + collected.set(chunk, offset); + offset += chunk.length; + } + return collected; +} +__name(collectReadableStream, "collectReadableStream"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 1238: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CredentialsProviderError: () => CredentialsProviderError, + ProviderError: () => ProviderError, + TokenProviderError: () => TokenProviderError, + chain: () => chain, + fromStatic: () => fromStatic, + memoize: () => memoize +}); +module.exports = __toCommonJS(src_exports); + +// src/ProviderError.ts +var _ProviderError = class _ProviderError extends Error { + constructor(message, options = true) { + var _a; + let logger; + let tryNextLink = true; + if (typeof options === "boolean") { + logger = void 0; + tryNextLink = options; + } else if (options != null && typeof options === "object") { + logger = options.logger; + tryNextLink = options.tryNextLink ?? 
true; + } + super(message); + this.name = "ProviderError"; + this.tryNextLink = tryNextLink; + Object.setPrototypeOf(this, _ProviderError.prototype); + (_a = logger == null ? void 0 : logger.debug) == null ? void 0 : _a.call(logger, `@smithy/property-provider ${tryNextLink ? "->" : "(!)"} ${message}`); + } + /** + * @deprecated use new operator. + */ + static from(error, options = true) { + return Object.assign(new this(error.message, options), error); + } +}; +__name(_ProviderError, "ProviderError"); +var ProviderError = _ProviderError; + +// src/CredentialsProviderError.ts +var _CredentialsProviderError = class _CredentialsProviderError extends ProviderError { + /** + * @override + */ + constructor(message, options = true) { + super(message, options); + this.name = "CredentialsProviderError"; + Object.setPrototypeOf(this, _CredentialsProviderError.prototype); + } +}; +__name(_CredentialsProviderError, "CredentialsProviderError"); +var CredentialsProviderError = _CredentialsProviderError; + +// src/TokenProviderError.ts +var _TokenProviderError = class _TokenProviderError extends ProviderError { + /** + * @override + */ + constructor(message, options = true) { + super(message, options); + this.name = "TokenProviderError"; + Object.setPrototypeOf(this, _TokenProviderError.prototype); + } +}; +__name(_TokenProviderError, "TokenProviderError"); +var TokenProviderError = _TokenProviderError; + +// src/chain.ts +var chain = /* @__PURE__ */ __name((...providers) => async () => { + if (providers.length === 0) { + throw new ProviderError("No providers in chain"); + } + let lastProviderError; + for (const provider of providers) { + try { + const credentials = await provider(); + return credentials; + } catch (err) { + lastProviderError = err; + if (err == null ? void 0 : err.tryNextLink) { + continue; + } + throw err; + } + } + throw lastProviderError; +}, "chain"); + +// src/fromStatic.ts +var fromStatic = /* @__PURE__ */ __name((staticValue) => () => Promise.resolve(staticValue), "fromStatic"); + +// src/memoize.ts +var memoize = /* @__PURE__ */ __name((provider, isExpired, requiresRefresh) => { + let resolved; + let pending; + let hasResult; + let isConstant = false; + const coalesceProvider = /* @__PURE__ */ __name(async () => { + if (!pending) { + pending = provider(); + } + try { + resolved = await pending; + hasResult = true; + isConstant = false; + } finally { + pending = void 0; + } + return resolved; + }, "coalesceProvider"); + if (isExpired === void 0) { + return async (options) => { + if (!hasResult || (options == null ? void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(); + } + return resolved; + }; + } + return async (options) => { + if (!hasResult || (options == null ? 
void 0 : options.forceRefresh)) { + resolved = await coalesceProvider(); + } + if (isConstant) { + return resolved; + } + if (requiresRefresh && !requiresRefresh(resolved)) { + isConstant = true; + return resolved; + } + if (isExpired(resolved)) { + await coalesceProvider(); + return resolved; + } + return resolved; + }; +}, "memoize"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 2356: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Field: () => Field, + Fields: () => Fields, + HttpRequest: () => HttpRequest, + HttpResponse: () => HttpResponse, + IHttpRequest: () => import_types.HttpRequest, + getHttpHandlerExtensionConfiguration: () => getHttpHandlerExtensionConfiguration, + isValidHostname: () => isValidHostname, + resolveHttpHandlerRuntimeConfig: () => resolveHttpHandlerRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/extensions/httpExtensionConfiguration.ts +var getHttpHandlerExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + let httpHandler = runtimeConfig.httpHandler; + return { + setHttpHandler(handler) { + httpHandler = handler; + }, + httpHandler() { + return httpHandler; + }, + updateHttpClientConfig(key, value) { + httpHandler.updateHttpClientConfig(key, value); + }, + httpHandlerConfigs() { + return httpHandler.httpHandlerConfigs(); + } + }; +}, "getHttpHandlerExtensionConfiguration"); +var resolveHttpHandlerRuntimeConfig = /* @__PURE__ */ __name((httpHandlerExtensionConfiguration) => { + return { + httpHandler: httpHandlerExtensionConfiguration.httpHandler() + }; +}, "resolveHttpHandlerRuntimeConfig"); + +// src/Field.ts +var import_types = __nccwpck_require__(690); +var _Field = class _Field { + constructor({ name, kind = import_types.FieldPosition.HEADER, values = [] }) { + this.name = name; + this.kind = kind; + this.values = values; + } + /** + * Appends a value to the field. + * + * @param value The value to append. + */ + add(value) { + this.values.push(value); + } + /** + * Overwrite existing field values. + * + * @param values The new field values. + */ + set(values) { + this.values = values; + } + /** + * Remove all matching entries from list. + * + * @param value Value to remove. + */ + remove(value) { + this.values = this.values.filter((v) => v !== value); + } + /** + * Get comma-delimited string. + * + * @returns String representation of {@link Field}. + */ + toString() { + return this.values.map((v) => v.includes(",") || v.includes(" ") ? 
`"${v}"` : v).join(", "); + } + /** + * Get string values as a list + * + * @returns Values in {@link Field} as a list. + */ + get() { + return this.values; + } +}; +__name(_Field, "Field"); +var Field = _Field; + +// src/Fields.ts +var _Fields = class _Fields { + constructor({ fields = [], encoding = "utf-8" }) { + this.entries = {}; + fields.forEach(this.setField.bind(this)); + this.encoding = encoding; + } + /** + * Set entry for a {@link Field} name. The `name` + * attribute will be used to key the collection. + * + * @param field The {@link Field} to set. + */ + setField(field) { + this.entries[field.name.toLowerCase()] = field; + } + /** + * Retrieve {@link Field} entry by name. + * + * @param name The name of the {@link Field} entry + * to retrieve + * @returns The {@link Field} if it exists. + */ + getField(name) { + return this.entries[name.toLowerCase()]; + } + /** + * Delete entry from collection. + * + * @param name Name of the entry to delete. + */ + removeField(name) { + delete this.entries[name.toLowerCase()]; + } + /** + * Helper function for retrieving specific types of fields. + * Used to grab all headers or all trailers. + * + * @param kind {@link FieldPosition} of entries to retrieve. + * @returns The {@link Field} entries with the specified + * {@link FieldPosition}. + */ + getByType(kind) { + return Object.values(this.entries).filter((field) => field.kind === kind); + } +}; +__name(_Fields, "Fields"); +var Fields = _Fields; + +// src/httpRequest.ts + +var _HttpRequest = class _HttpRequest { + constructor(options) { + this.method = options.method || "GET"; + this.hostname = options.hostname || "localhost"; + this.port = options.port; + this.query = options.query || {}; + this.headers = options.headers || {}; + this.body = options.body; + this.protocol = options.protocol ? options.protocol.slice(-1) !== ":" ? `${options.protocol}:` : options.protocol : "https:"; + this.path = options.path ? options.path.charAt(0) !== "/" ? `/${options.path}` : options.path : "/"; + this.username = options.username; + this.password = options.password; + this.fragment = options.fragment; + } + /** + * Note: this does not deep-clone the body. + */ + static clone(request) { + const cloned = new _HttpRequest({ + ...request, + headers: { ...request.headers } + }); + if (cloned.query) { + cloned.query = cloneQuery(cloned.query); + } + return cloned; + } + /** + * This method only actually asserts that request is the interface {@link IHttpRequest}, + * and not necessarily this concrete class. Left in place for API stability. + * + * Do not call instance methods on the input of this function, and + * do not assume it has the HttpRequest prototype. + */ + static isInstance(request) { + if (!request) { + return false; + } + const req = request; + return "method" in req && "protocol" in req && "hostname" in req && "path" in req && typeof req["query"] === "object" && typeof req["headers"] === "object"; + } + /** + * @deprecated use static HttpRequest.clone(request) instead. It's not safe to call + * this method because {@link HttpRequest.isInstance} incorrectly + * asserts that IHttpRequest (interface) objects are of type HttpRequest (class). + */ + clone() { + return _HttpRequest.clone(this); + } +}; +__name(_HttpRequest, "HttpRequest"); +var HttpRequest = _HttpRequest; +function cloneQuery(query) { + return Object.keys(query).reduce((carry, paramName) => { + const param = query[paramName]; + return { + ...carry, + [paramName]: Array.isArray(param) ? 
[...param] : param + }; + }, {}); +} +__name(cloneQuery, "cloneQuery"); + +// src/httpResponse.ts +var _HttpResponse = class _HttpResponse { + constructor(options) { + this.statusCode = options.statusCode; + this.reason = options.reason; + this.headers = options.headers || {}; + this.body = options.body; + } + static isInstance(response) { + if (!response) + return false; + const resp = response; + return typeof resp.statusCode === "number" && typeof resp.headers === "object"; + } +}; +__name(_HttpResponse, "HttpResponse"); +var HttpResponse = _HttpResponse; + +// src/isValidHostname.ts +function isValidHostname(hostname) { + const hostPattern = /^[a-z0-9][a-z0-9\.\-]*[a-z0-9]$/; + return hostPattern.test(hostname); +} +__name(isValidHostname, "isValidHostname"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 8256: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + buildQueryString: () => buildQueryString +}); +module.exports = __toCommonJS(src_exports); +var import_util_uri_escape = __nccwpck_require__(146); +function buildQueryString(query) { + const parts = []; + for (let key of Object.keys(query).sort()) { + const value = query[key]; + key = (0, import_util_uri_escape.escapeUri)(key); + if (Array.isArray(value)) { + for (let i = 0, iLen = value.length; i < iLen; i++) { + parts.push(`${key}=${(0, import_util_uri_escape.escapeUri)(value[i])}`); + } + } else { + let qsEntry = key; + if (value || typeof value === "string") { + qsEntry += `=${(0, import_util_uri_escape.escapeUri)(value)}`; + } + parts.push(qsEntry); + } + } + return parts.join("&"); +} +__name(buildQueryString, "buildQueryString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 8822: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var 
__toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseQueryString: () => parseQueryString +}); +module.exports = __toCommonJS(src_exports); +function parseQueryString(querystring) { + const query = {}; + querystring = querystring.replace(/^\?/, ""); + if (querystring) { + for (const pair of querystring.split("&")) { + let [key, value = null] = pair.split("="); + key = decodeURIComponent(key); + if (value) { + value = decodeURIComponent(value); + } + if (!(key in query)) { + query[key] = value; + } else if (Array.isArray(query[key])) { + query[key].push(value); + } else { + query[key] = [query[key], value]; + } + } + } + return query; +} +__name(parseQueryString, "parseQueryString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 2058: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + isClockSkewCorrectedError: () => isClockSkewCorrectedError, + isClockSkewError: () => isClockSkewError, + isRetryableByTrait: () => isRetryableByTrait, + isServerError: () => isServerError, + isThrottlingError: () => isThrottlingError, + isTransientError: () => isTransientError +}); +module.exports = __toCommonJS(src_exports); + +// src/constants.ts +var CLOCK_SKEW_ERROR_CODES = [ + "AuthFailure", + "InvalidSignatureException", + "RequestExpired", + "RequestInTheFuture", + "RequestTimeTooSkewed", + "SignatureDoesNotMatch" +]; +var THROTTLING_ERROR_CODES = [ + "BandwidthLimitExceeded", + "EC2ThrottledException", + "LimitExceededException", + "PriorRequestNotComplete", + "ProvisionedThroughputExceededException", + "RequestLimitExceeded", + "RequestThrottled", + "RequestThrottledException", + "SlowDown", + "ThrottledException", + "Throttling", + "ThrottlingException", + "TooManyRequestsException", + "TransactionInProgressException" + // DynamoDB +]; +var TRANSIENT_ERROR_CODES = ["TimeoutError", "RequestTimeout", "RequestTimeoutException"]; +var TRANSIENT_ERROR_STATUS_CODES = [500, 502, 503, 504]; +var NODEJS_TIMEOUT_ERROR_CODES = ["ECONNRESET", "ECONNREFUSED", "EPIPE", "ETIMEDOUT"]; + +// src/index.ts +var isRetryableByTrait = /* @__PURE__ */ __name((error) => error.$retryable !== void 0, "isRetryableByTrait"); +var isClockSkewError = /* @__PURE__ */ __name((error) => CLOCK_SKEW_ERROR_CODES.includes(error.name), "isClockSkewError"); +var isClockSkewCorrectedError = /* @__PURE__ */ __name((error) => { + var _a; + return (_a = error.$metadata) == null ? 
void 0 : _a.clockSkewCorrected; +}, "isClockSkewCorrectedError"); +var isThrottlingError = /* @__PURE__ */ __name((error) => { + var _a, _b; + return ((_a = error.$metadata) == null ? void 0 : _a.httpStatusCode) === 429 || THROTTLING_ERROR_CODES.includes(error.name) || ((_b = error.$retryable) == null ? void 0 : _b.throttling) == true; +}, "isThrottlingError"); +var isTransientError = /* @__PURE__ */ __name((error, depth = 0) => { + var _a; + return isClockSkewCorrectedError(error) || TRANSIENT_ERROR_CODES.includes(error.name) || NODEJS_TIMEOUT_ERROR_CODES.includes((error == null ? void 0 : error.code) || "") || TRANSIENT_ERROR_STATUS_CODES.includes(((_a = error.$metadata) == null ? void 0 : _a.httpStatusCode) || 0) || error.cause !== void 0 && depth <= 10 && isTransientError(error.cause, depth + 1); +}, "isTransientError"); +var isServerError = /* @__PURE__ */ __name((error) => { + var _a; + if (((_a = error.$metadata) == null ? void 0 : _a.httpStatusCode) !== void 0) { + const statusCode = error.$metadata.httpStatusCode; + if (500 <= statusCode && statusCode <= 599 && !isTransientError(error)) { + return true; + } + return false; + } + return false; +}, "isServerError"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 4172: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getHomeDir = void 0; +const os_1 = __nccwpck_require__(857); +const path_1 = __nccwpck_require__(6928); +const homeDirCache = {}; +const getHomeDirCacheKey = () => { + if (process && process.geteuid) { + return `${process.geteuid()}`; + } + return "DEFAULT"; +}; +const getHomeDir = () => { + const { HOME, USERPROFILE, HOMEPATH, HOMEDRIVE = `C:${path_1.sep}` } = process.env; + if (HOME) + return HOME; + if (USERPROFILE) + return USERPROFILE; + if (HOMEPATH) + return `${HOMEDRIVE}${HOMEPATH}`; + const homeDirCacheKey = getHomeDirCacheKey(); + if (!homeDirCache[homeDirCacheKey]) + homeDirCache[homeDirCacheKey] = (0, os_1.homedir)(); + return homeDirCache[homeDirCacheKey]; +}; +exports.getHomeDir = getHomeDir; + + +/***/ }), + +/***/ 269: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSSOTokenFilepath = void 0; +const crypto_1 = __nccwpck_require__(6982); +const path_1 = __nccwpck_require__(6928); +const getHomeDir_1 = __nccwpck_require__(4172); +const getSSOTokenFilepath = (id) => { + const hasher = (0, crypto_1.createHash)("sha1"); + const cacheName = hasher.update(id).digest("hex"); + return (0, path_1.join)((0, getHomeDir_1.getHomeDir)(), ".aws", "sso", "cache", `${cacheName}.json`); +}; +exports.getSSOTokenFilepath = getSSOTokenFilepath; + + +/***/ }), + +/***/ 1326: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSSOTokenFromFile = void 0; +const fs_1 = __nccwpck_require__(9896); +const getSSOTokenFilepath_1 = __nccwpck_require__(269); +const { readFile } = fs_1.promises; +const getSSOTokenFromFile = async (id) => { + const ssoTokenFilepath = (0, getSSOTokenFilepath_1.getSSOTokenFilepath)(id); + const ssoTokenText = await readFile(ssoTokenFilepath, "utf8"); + return JSON.parse(ssoTokenText); +}; +exports.getSSOTokenFromFile = getSSOTokenFromFile; + + +/***/ }), + +/***/ 4964: +/***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + CONFIG_PREFIX_SEPARATOR: () => CONFIG_PREFIX_SEPARATOR, + DEFAULT_PROFILE: () => DEFAULT_PROFILE, + ENV_PROFILE: () => ENV_PROFILE, + getProfileName: () => getProfileName, + loadSharedConfigFiles: () => loadSharedConfigFiles, + loadSsoSessionData: () => loadSsoSessionData, + parseKnownFiles: () => parseKnownFiles +}); +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, __nccwpck_require__(4172), module.exports); + +// src/getProfileName.ts +var ENV_PROFILE = "AWS_PROFILE"; +var DEFAULT_PROFILE = "default"; +var getProfileName = /* @__PURE__ */ __name((init) => init.profile || process.env[ENV_PROFILE] || DEFAULT_PROFILE, "getProfileName"); + +// src/index.ts +__reExport(src_exports, __nccwpck_require__(269), module.exports); +__reExport(src_exports, __nccwpck_require__(1326), module.exports); + +// src/loadSharedConfigFiles.ts + + +// src/getConfigData.ts +var import_types = __nccwpck_require__(690); +var getConfigData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + if (indexOfSeparator === -1) { + return false; + } + return Object.values(import_types.IniSectionType).includes(key.substring(0, indexOfSeparator)); +}).reduce( + (acc, [key, value]) => { + const indexOfSeparator = key.indexOf(CONFIG_PREFIX_SEPARATOR); + const updatedKey = key.substring(0, indexOfSeparator) === import_types.IniSectionType.PROFILE ? key.substring(indexOfSeparator + 1) : key; + acc[updatedKey] = value; + return acc; + }, + { + // Populate default profile, if present. 
+ ...data.default && { default: data.default } + } +), "getConfigData"); + +// src/getConfigFilepath.ts +var import_path = __nccwpck_require__(6928); +var import_getHomeDir = __nccwpck_require__(4172); +var ENV_CONFIG_PATH = "AWS_CONFIG_FILE"; +var getConfigFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CONFIG_PATH] || (0, import_path.join)((0, import_getHomeDir.getHomeDir)(), ".aws", "config"), "getConfigFilepath"); + +// src/getCredentialsFilepath.ts + +var import_getHomeDir2 = __nccwpck_require__(4172); +var ENV_CREDENTIALS_PATH = "AWS_SHARED_CREDENTIALS_FILE"; +var getCredentialsFilepath = /* @__PURE__ */ __name(() => process.env[ENV_CREDENTIALS_PATH] || (0, import_path.join)((0, import_getHomeDir2.getHomeDir)(), ".aws", "credentials"), "getCredentialsFilepath"); + +// src/loadSharedConfigFiles.ts +var import_getHomeDir3 = __nccwpck_require__(4172); + +// src/parseIni.ts + +var prefixKeyRegex = /^([\w-]+)\s(["'])?([\w-@\+\.%:/]+)\2$/; +var profileNameBlockList = ["__proto__", "profile __proto__"]; +var parseIni = /* @__PURE__ */ __name((iniData) => { + const map = {}; + let currentSection; + let currentSubSection; + for (const iniLine of iniData.split(/\r?\n/)) { + const trimmedLine = iniLine.split(/(^|\s)[;#]/)[0].trim(); + const isSection = trimmedLine[0] === "[" && trimmedLine[trimmedLine.length - 1] === "]"; + if (isSection) { + currentSection = void 0; + currentSubSection = void 0; + const sectionName = trimmedLine.substring(1, trimmedLine.length - 1); + const matches = prefixKeyRegex.exec(sectionName); + if (matches) { + const [, prefix, , name] = matches; + if (Object.values(import_types.IniSectionType).includes(prefix)) { + currentSection = [prefix, name].join(CONFIG_PREFIX_SEPARATOR); + } + } else { + currentSection = sectionName; + } + if (profileNameBlockList.includes(sectionName)) { + throw new Error(`Found invalid profile name "${sectionName}"`); + } + } else if (currentSection) { + const indexOfEqualsSign = trimmedLine.indexOf("="); + if (![0, -1].includes(indexOfEqualsSign)) { + const [name, value] = [ + trimmedLine.substring(0, indexOfEqualsSign).trim(), + trimmedLine.substring(indexOfEqualsSign + 1).trim() + ]; + if (value === "") { + currentSubSection = name; + } else { + if (currentSubSection && iniLine.trimStart() === iniLine) { + currentSubSection = void 0; + } + map[currentSection] = map[currentSection] || {}; + const key = currentSubSection ? 
[currentSubSection, name].join(CONFIG_PREFIX_SEPARATOR) : name; + map[currentSection][key] = value; + } + } + } + } + return map; +}, "parseIni"); + +// src/loadSharedConfigFiles.ts +var import_slurpFile = __nccwpck_require__(4246); +var swallowError = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var CONFIG_PREFIX_SEPARATOR = "."; +var loadSharedConfigFiles = /* @__PURE__ */ __name(async (init = {}) => { + const { filepath = getCredentialsFilepath(), configFilepath = getConfigFilepath() } = init; + const homeDir = (0, import_getHomeDir3.getHomeDir)(); + const relativeHomeDirPrefix = "~/"; + let resolvedFilepath = filepath; + if (filepath.startsWith(relativeHomeDirPrefix)) { + resolvedFilepath = (0, import_path.join)(homeDir, filepath.slice(2)); + } + let resolvedConfigFilepath = configFilepath; + if (configFilepath.startsWith(relativeHomeDirPrefix)) { + resolvedConfigFilepath = (0, import_path.join)(homeDir, configFilepath.slice(2)); + } + const parsedFiles = await Promise.all([ + (0, import_slurpFile.slurpFile)(resolvedConfigFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).then(getConfigData).catch(swallowError), + (0, import_slurpFile.slurpFile)(resolvedFilepath, { + ignoreCache: init.ignoreCache + }).then(parseIni).catch(swallowError) + ]); + return { + configFile: parsedFiles[0], + credentialsFile: parsedFiles[1] + }; +}, "loadSharedConfigFiles"); + +// src/getSsoSessionData.ts + +var getSsoSessionData = /* @__PURE__ */ __name((data) => Object.entries(data).filter(([key]) => key.startsWith(import_types.IniSectionType.SSO_SESSION + CONFIG_PREFIX_SEPARATOR)).reduce((acc, [key, value]) => ({ ...acc, [key.substring(key.indexOf(CONFIG_PREFIX_SEPARATOR) + 1)]: value }), {}), "getSsoSessionData"); + +// src/loadSsoSessionData.ts +var import_slurpFile2 = __nccwpck_require__(4246); +var swallowError2 = /* @__PURE__ */ __name(() => ({}), "swallowError"); +var loadSsoSessionData = /* @__PURE__ */ __name(async (init = {}) => (0, import_slurpFile2.slurpFile)(init.configFilepath ?? getConfigFilepath()).then(parseIni).then(getSsoSessionData).catch(swallowError2), "loadSsoSessionData"); + +// src/mergeConfigFiles.ts +var mergeConfigFiles = /* @__PURE__ */ __name((...files) => { + const merged = {}; + for (const file of files) { + for (const [key, values] of Object.entries(file)) { + if (merged[key] !== void 0) { + Object.assign(merged[key], values); + } else { + merged[key] = values; + } + } + } + return merged; +}, "mergeConfigFiles"); + +// src/parseKnownFiles.ts +var parseKnownFiles = /* @__PURE__ */ __name(async (init) => { + const parsedFiles = await loadSharedConfigFiles(init); + return mergeConfigFiles(parsedFiles.configFile, parsedFiles.credentialsFile); +}, "parseKnownFiles"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 4246: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.slurpFile = void 0; +const fs_1 = __nccwpck_require__(9896); +const { readFile } = fs_1.promises; +const filePromisesHash = {}; +const slurpFile = (path, options) => { + if (!filePromisesHash[path] || (options === null || options === void 0 ? 
void 0 : options.ignoreCache)) { + filePromisesHash[path] = readFile(path, "utf8"); + } + return filePromisesHash[path]; +}; +exports.slurpFile = slurpFile; + + +/***/ }), + +/***/ 5118: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + SignatureV4: () => SignatureV4, + clearCredentialCache: () => clearCredentialCache, + createScope: () => createScope, + getCanonicalHeaders: () => getCanonicalHeaders, + getCanonicalQuery: () => getCanonicalQuery, + getPayloadHash: () => getPayloadHash, + getSigningKey: () => getSigningKey, + moveHeadersToQuery: () => moveHeadersToQuery, + prepareRequest: () => prepareRequest +}); +module.exports = __toCommonJS(src_exports); + +// src/SignatureV4.ts + +var import_util_middleware = __nccwpck_require__(6324); + +var import_util_utf84 = __nccwpck_require__(1577); + +// src/constants.ts +var ALGORITHM_QUERY_PARAM = "X-Amz-Algorithm"; +var CREDENTIAL_QUERY_PARAM = "X-Amz-Credential"; +var AMZ_DATE_QUERY_PARAM = "X-Amz-Date"; +var SIGNED_HEADERS_QUERY_PARAM = "X-Amz-SignedHeaders"; +var EXPIRES_QUERY_PARAM = "X-Amz-Expires"; +var SIGNATURE_QUERY_PARAM = "X-Amz-Signature"; +var TOKEN_QUERY_PARAM = "X-Amz-Security-Token"; +var AUTH_HEADER = "authorization"; +var AMZ_DATE_HEADER = AMZ_DATE_QUERY_PARAM.toLowerCase(); +var DATE_HEADER = "date"; +var GENERATED_HEADERS = [AUTH_HEADER, AMZ_DATE_HEADER, DATE_HEADER]; +var SIGNATURE_HEADER = SIGNATURE_QUERY_PARAM.toLowerCase(); +var SHA256_HEADER = "x-amz-content-sha256"; +var TOKEN_HEADER = TOKEN_QUERY_PARAM.toLowerCase(); +var ALWAYS_UNSIGNABLE_HEADERS = { + authorization: true, + "cache-control": true, + connection: true, + expect: true, + from: true, + "keep-alive": true, + "max-forwards": true, + pragma: true, + referer: true, + te: true, + trailer: true, + "transfer-encoding": true, + upgrade: true, + "user-agent": true, + "x-amzn-trace-id": true +}; +var PROXY_HEADER_PATTERN = /^proxy-/; +var SEC_HEADER_PATTERN = /^sec-/; +var ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256"; +var EVENT_ALGORITHM_IDENTIFIER = "AWS4-HMAC-SHA256-PAYLOAD"; +var UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; +var MAX_CACHE_SIZE = 50; +var KEY_TYPE_IDENTIFIER = "aws4_request"; +var MAX_PRESIGNED_TTL = 60 * 60 * 24 * 7; + +// src/credentialDerivation.ts +var import_util_hex_encoding = __nccwpck_require__(6435); +var import_util_utf8 = __nccwpck_require__(1577); +var signingKeyCache = {}; +var cacheQueue = []; +var createScope = /* @__PURE__ */ __name((shortDate, region, service) => `${shortDate}/${region}/${service}/${KEY_TYPE_IDENTIFIER}`, "createScope"); +var getSigningKey = /* @__PURE__ */ __name(async 
(sha256Constructor, credentials, shortDate, region, service) => { + const credsHash = await hmac(sha256Constructor, credentials.secretAccessKey, credentials.accessKeyId); + const cacheKey = `${shortDate}:${region}:${service}:${(0, import_util_hex_encoding.toHex)(credsHash)}:${credentials.sessionToken}`; + if (cacheKey in signingKeyCache) { + return signingKeyCache[cacheKey]; + } + cacheQueue.push(cacheKey); + while (cacheQueue.length > MAX_CACHE_SIZE) { + delete signingKeyCache[cacheQueue.shift()]; + } + let key = `AWS4${credentials.secretAccessKey}`; + for (const signable of [shortDate, region, service, KEY_TYPE_IDENTIFIER]) { + key = await hmac(sha256Constructor, key, signable); + } + return signingKeyCache[cacheKey] = key; +}, "getSigningKey"); +var clearCredentialCache = /* @__PURE__ */ __name(() => { + cacheQueue.length = 0; + Object.keys(signingKeyCache).forEach((cacheKey) => { + delete signingKeyCache[cacheKey]; + }); +}, "clearCredentialCache"); +var hmac = /* @__PURE__ */ __name((ctor, secret, data) => { + const hash = new ctor(secret); + hash.update((0, import_util_utf8.toUint8Array)(data)); + return hash.digest(); +}, "hmac"); + +// src/getCanonicalHeaders.ts +var getCanonicalHeaders = /* @__PURE__ */ __name(({ headers }, unsignableHeaders, signableHeaders) => { + const canonical = {}; + for (const headerName of Object.keys(headers).sort()) { + if (headers[headerName] == void 0) { + continue; + } + const canonicalHeaderName = headerName.toLowerCase(); + if (canonicalHeaderName in ALWAYS_UNSIGNABLE_HEADERS || (unsignableHeaders == null ? void 0 : unsignableHeaders.has(canonicalHeaderName)) || PROXY_HEADER_PATTERN.test(canonicalHeaderName) || SEC_HEADER_PATTERN.test(canonicalHeaderName)) { + if (!signableHeaders || signableHeaders && !signableHeaders.has(canonicalHeaderName)) { + continue; + } + } + canonical[canonicalHeaderName] = headers[headerName].trim().replace(/\s+/g, " "); + } + return canonical; +}, "getCanonicalHeaders"); + +// src/getCanonicalQuery.ts +var import_util_uri_escape = __nccwpck_require__(146); +var getCanonicalQuery = /* @__PURE__ */ __name(({ query = {} }) => { + const keys = []; + const serialized = {}; + for (const key of Object.keys(query)) { + if (key.toLowerCase() === SIGNATURE_HEADER) { + continue; + } + const encodedKey = (0, import_util_uri_escape.escapeUri)(key); + keys.push(encodedKey); + const value = query[key]; + if (typeof value === "string") { + serialized[encodedKey] = `${encodedKey}=${(0, import_util_uri_escape.escapeUri)(value)}`; + } else if (Array.isArray(value)) { + serialized[encodedKey] = value.slice(0).reduce((encoded, value2) => encoded.concat([`${encodedKey}=${(0, import_util_uri_escape.escapeUri)(value2)}`]), []).sort().join("&"); + } + } + return keys.sort().map((key) => serialized[key]).filter((serialized2) => serialized2).join("&"); +}, "getCanonicalQuery"); + +// src/getPayloadHash.ts +var import_is_array_buffer = __nccwpck_require__(6130); + +var import_util_utf82 = __nccwpck_require__(1577); +var getPayloadHash = /* @__PURE__ */ __name(async ({ headers, body }, hashConstructor) => { + for (const headerName of Object.keys(headers)) { + if (headerName.toLowerCase() === SHA256_HEADER) { + return headers[headerName]; + } + } + if (body == void 0) { + return "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; + } else if (typeof body === "string" || ArrayBuffer.isView(body) || (0, import_is_array_buffer.isArrayBuffer)(body)) { + const hashCtor = new hashConstructor(); + hashCtor.update((0, 
import_util_utf82.toUint8Array)(body)); + return (0, import_util_hex_encoding.toHex)(await hashCtor.digest()); + } + return UNSIGNED_PAYLOAD; +}, "getPayloadHash"); + +// src/HeaderFormatter.ts + +var import_util_utf83 = __nccwpck_require__(1577); +var _HeaderFormatter = class _HeaderFormatter { + format(headers) { + const chunks = []; + for (const headerName of Object.keys(headers)) { + const bytes = (0, import_util_utf83.fromUtf8)(headerName); + chunks.push(Uint8Array.from([bytes.byteLength]), bytes, this.formatHeaderValue(headers[headerName])); + } + const out = new Uint8Array(chunks.reduce((carry, bytes) => carry + bytes.byteLength, 0)); + let position = 0; + for (const chunk of chunks) { + out.set(chunk, position); + position += chunk.byteLength; + } + return out; + } + formatHeaderValue(header) { + switch (header.type) { + case "boolean": + return Uint8Array.from([header.value ? 0 /* boolTrue */ : 1 /* boolFalse */]); + case "byte": + return Uint8Array.from([2 /* byte */, header.value]); + case "short": + const shortView = new DataView(new ArrayBuffer(3)); + shortView.setUint8(0, 3 /* short */); + shortView.setInt16(1, header.value, false); + return new Uint8Array(shortView.buffer); + case "integer": + const intView = new DataView(new ArrayBuffer(5)); + intView.setUint8(0, 4 /* integer */); + intView.setInt32(1, header.value, false); + return new Uint8Array(intView.buffer); + case "long": + const longBytes = new Uint8Array(9); + longBytes[0] = 5 /* long */; + longBytes.set(header.value.bytes, 1); + return longBytes; + case "binary": + const binView = new DataView(new ArrayBuffer(3 + header.value.byteLength)); + binView.setUint8(0, 6 /* byteArray */); + binView.setUint16(1, header.value.byteLength, false); + const binBytes = new Uint8Array(binView.buffer); + binBytes.set(header.value, 3); + return binBytes; + case "string": + const utf8Bytes = (0, import_util_utf83.fromUtf8)(header.value); + const strView = new DataView(new ArrayBuffer(3 + utf8Bytes.byteLength)); + strView.setUint8(0, 7 /* string */); + strView.setUint16(1, utf8Bytes.byteLength, false); + const strBytes = new Uint8Array(strView.buffer); + strBytes.set(utf8Bytes, 3); + return strBytes; + case "timestamp": + const tsBytes = new Uint8Array(9); + tsBytes[0] = 8 /* timestamp */; + tsBytes.set(Int64.fromNumber(header.value.valueOf()).bytes, 1); + return tsBytes; + case "uuid": + if (!UUID_PATTERN.test(header.value)) { + throw new Error(`Invalid UUID received: ${header.value}`); + } + const uuidBytes = new Uint8Array(17); + uuidBytes[0] = 9 /* uuid */; + uuidBytes.set((0, import_util_hex_encoding.fromHex)(header.value.replace(/\-/g, "")), 1); + return uuidBytes; + } + } +}; +__name(_HeaderFormatter, "HeaderFormatter"); +var HeaderFormatter = _HeaderFormatter; +var UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/; +var _Int64 = class _Int64 { + constructor(bytes) { + this.bytes = bytes; + if (bytes.byteLength !== 8) { + throw new Error("Int64 buffers must be exactly 8 bytes"); + } + } + static fromNumber(number) { + if (number > 9223372036854776e3 || number < -9223372036854776e3) { + throw new Error(`${number} is too large (or, if negative, too small) to represent as an Int64`); + } + const bytes = new Uint8Array(8); + for (let i = 7, remaining = Math.abs(Math.round(number)); i > -1 && remaining > 0; i--, remaining /= 256) { + bytes[i] = remaining; + } + if (number < 0) { + negate(bytes); + } + return new _Int64(bytes); + } + /** + * Called implicitly by infix arithmetic operators. 
+ */ + valueOf() { + const bytes = this.bytes.slice(0); + const negative = bytes[0] & 128; + if (negative) { + negate(bytes); + } + return parseInt((0, import_util_hex_encoding.toHex)(bytes), 16) * (negative ? -1 : 1); + } + toString() { + return String(this.valueOf()); + } +}; +__name(_Int64, "Int64"); +var Int64 = _Int64; +function negate(bytes) { + for (let i = 0; i < 8; i++) { + bytes[i] ^= 255; + } + for (let i = 7; i > -1; i--) { + bytes[i]++; + if (bytes[i] !== 0) + break; + } +} +__name(negate, "negate"); + +// src/headerUtil.ts +var hasHeader = /* @__PURE__ */ __name((soughtHeader, headers) => { + soughtHeader = soughtHeader.toLowerCase(); + for (const headerName of Object.keys(headers)) { + if (soughtHeader === headerName.toLowerCase()) { + return true; + } + } + return false; +}, "hasHeader"); + +// src/moveHeadersToQuery.ts +var import_protocol_http = __nccwpck_require__(2356); +var moveHeadersToQuery = /* @__PURE__ */ __name((request, options = {}) => { + var _a, _b; + const { headers, query = {} } = import_protocol_http.HttpRequest.clone(request); + for (const name of Object.keys(headers)) { + const lname = name.toLowerCase(); + if (lname.slice(0, 6) === "x-amz-" && !((_a = options.unhoistableHeaders) == null ? void 0 : _a.has(lname)) || ((_b = options.hoistableHeaders) == null ? void 0 : _b.has(lname))) { + query[name] = headers[name]; + delete headers[name]; + } + } + return { + ...request, + headers, + query + }; +}, "moveHeadersToQuery"); + +// src/prepareRequest.ts + +var prepareRequest = /* @__PURE__ */ __name((request) => { + request = import_protocol_http.HttpRequest.clone(request); + for (const headerName of Object.keys(request.headers)) { + if (GENERATED_HEADERS.indexOf(headerName.toLowerCase()) > -1) { + delete request.headers[headerName]; + } + } + return request; +}, "prepareRequest"); + +// src/utilDate.ts +var iso8601 = /* @__PURE__ */ __name((time) => toDate(time).toISOString().replace(/\.\d{3}Z$/, "Z"), "iso8601"); +var toDate = /* @__PURE__ */ __name((time) => { + if (typeof time === "number") { + return new Date(time * 1e3); + } + if (typeof time === "string") { + if (Number(time)) { + return new Date(Number(time) * 1e3); + } + return new Date(time); + } + return time; +}, "toDate"); + +// src/SignatureV4.ts +var _SignatureV4 = class _SignatureV4 { + constructor({ + applyChecksum, + credentials, + region, + service, + sha256, + uriEscapePath = true + }) { + this.headerFormatter = new HeaderFormatter(); + this.service = service; + this.sha256 = sha256; + this.uriEscapePath = uriEscapePath; + this.applyChecksum = typeof applyChecksum === "boolean" ? applyChecksum : true; + this.regionProvider = (0, import_util_middleware.normalizeProvider)(region); + this.credentialProvider = (0, import_util_middleware.normalizeProvider)(credentials); + } + async presign(originalRequest, options = {}) { + const { + signingDate = /* @__PURE__ */ new Date(), + expiresIn = 3600, + unsignableHeaders, + unhoistableHeaders, + signableHeaders, + hoistableHeaders, + signingRegion, + signingService + } = options; + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? await this.regionProvider(); + const { longDate, shortDate } = formatDate(signingDate); + if (expiresIn > MAX_PRESIGNED_TTL) { + return Promise.reject( + "Signature version 4 presigned URLs must have an expiration date less than one week in the future" + ); + } + const scope = createScope(shortDate, region, signingService ?? 
this.service); + const request = moveHeadersToQuery(prepareRequest(originalRequest), { unhoistableHeaders, hoistableHeaders }); + if (credentials.sessionToken) { + request.query[TOKEN_QUERY_PARAM] = credentials.sessionToken; + } + request.query[ALGORITHM_QUERY_PARAM] = ALGORITHM_IDENTIFIER; + request.query[CREDENTIAL_QUERY_PARAM] = `${credentials.accessKeyId}/${scope}`; + request.query[AMZ_DATE_QUERY_PARAM] = longDate; + request.query[EXPIRES_QUERY_PARAM] = expiresIn.toString(10); + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + request.query[SIGNED_HEADERS_QUERY_PARAM] = getCanonicalHeaderList(canonicalHeaders); + request.query[SIGNATURE_QUERY_PARAM] = await this.getSignature( + longDate, + scope, + this.getSigningKey(credentials, region, shortDate, signingService), + this.createCanonicalRequest(request, canonicalHeaders, await getPayloadHash(originalRequest, this.sha256)) + ); + return request; + } + async sign(toSign, options) { + if (typeof toSign === "string") { + return this.signString(toSign, options); + } else if (toSign.headers && toSign.payload) { + return this.signEvent(toSign, options); + } else if (toSign.message) { + return this.signMessage(toSign, options); + } else { + return this.signRequest(toSign, options); + } + } + async signEvent({ headers, payload }, { signingDate = /* @__PURE__ */ new Date(), priorSignature, signingRegion, signingService }) { + const region = signingRegion ?? await this.regionProvider(); + const { shortDate, longDate } = formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + const hashedPayload = await getPayloadHash({ headers: {}, body: payload }, this.sha256); + const hash = new this.sha256(); + hash.update(headers); + const hashedHeaders = (0, import_util_hex_encoding.toHex)(await hash.digest()); + const stringToSign = [ + EVENT_ALGORITHM_IDENTIFIER, + longDate, + scope, + priorSignature, + hashedHeaders, + hashedPayload + ].join("\n"); + return this.signString(stringToSign, { signingDate, signingRegion: region, signingService }); + } + async signMessage(signableMessage, { signingDate = /* @__PURE__ */ new Date(), signingRegion, signingService }) { + const promise = this.signEvent( + { + headers: this.headerFormatter.format(signableMessage.message.headers), + payload: signableMessage.message.body + }, + { + signingDate, + signingRegion, + signingService, + priorSignature: signableMessage.priorSignature + } + ); + return promise.then((signature) => { + return { message: signableMessage.message, signature }; + }); + } + async signString(stringToSign, { signingDate = /* @__PURE__ */ new Date(), signingRegion, signingService } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? await this.regionProvider(); + const { shortDate } = formatDate(signingDate); + const hash = new this.sha256(await this.getSigningKey(credentials, region, shortDate, signingService)); + hash.update((0, import_util_utf84.toUint8Array)(stringToSign)); + return (0, import_util_hex_encoding.toHex)(await hash.digest()); + } + async signRequest(requestToSign, { + signingDate = /* @__PURE__ */ new Date(), + signableHeaders, + unsignableHeaders, + signingRegion, + signingService + } = {}) { + const credentials = await this.credentialProvider(); + this.validateResolvedCredentials(credentials); + const region = signingRegion ?? 
await this.regionProvider(); + const request = prepareRequest(requestToSign); + const { longDate, shortDate } = formatDate(signingDate); + const scope = createScope(shortDate, region, signingService ?? this.service); + request.headers[AMZ_DATE_HEADER] = longDate; + if (credentials.sessionToken) { + request.headers[TOKEN_HEADER] = credentials.sessionToken; + } + const payloadHash = await getPayloadHash(request, this.sha256); + if (!hasHeader(SHA256_HEADER, request.headers) && this.applyChecksum) { + request.headers[SHA256_HEADER] = payloadHash; + } + const canonicalHeaders = getCanonicalHeaders(request, unsignableHeaders, signableHeaders); + const signature = await this.getSignature( + longDate, + scope, + this.getSigningKey(credentials, region, shortDate, signingService), + this.createCanonicalRequest(request, canonicalHeaders, payloadHash) + ); + request.headers[AUTH_HEADER] = `${ALGORITHM_IDENTIFIER} Credential=${credentials.accessKeyId}/${scope}, SignedHeaders=${getCanonicalHeaderList(canonicalHeaders)}, Signature=${signature}`; + return request; + } + createCanonicalRequest(request, canonicalHeaders, payloadHash) { + const sortedHeaders = Object.keys(canonicalHeaders).sort(); + return `${request.method} +${this.getCanonicalPath(request)} +${getCanonicalQuery(request)} +${sortedHeaders.map((name) => `${name}:${canonicalHeaders[name]}`).join("\n")} + +${sortedHeaders.join(";")} +${payloadHash}`; + } + async createStringToSign(longDate, credentialScope, canonicalRequest) { + const hash = new this.sha256(); + hash.update((0, import_util_utf84.toUint8Array)(canonicalRequest)); + const hashedRequest = await hash.digest(); + return `${ALGORITHM_IDENTIFIER} +${longDate} +${credentialScope} +${(0, import_util_hex_encoding.toHex)(hashedRequest)}`; + } + getCanonicalPath({ path }) { + if (this.uriEscapePath) { + const normalizedPathSegments = []; + for (const pathSegment of path.split("/")) { + if ((pathSegment == null ? void 0 : pathSegment.length) === 0) + continue; + if (pathSegment === ".") + continue; + if (pathSegment === "..") { + normalizedPathSegments.pop(); + } else { + normalizedPathSegments.push(pathSegment); + } + } + const normalizedPath = `${(path == null ? void 0 : path.startsWith("/")) ? "/" : ""}${normalizedPathSegments.join("/")}${normalizedPathSegments.length > 0 && (path == null ? void 0 : path.endsWith("/")) ? 
"/" : ""}`; + const doubleEncoded = (0, import_util_uri_escape.escapeUri)(normalizedPath); + return doubleEncoded.replace(/%2F/g, "/"); + } + return path; + } + async getSignature(longDate, credentialScope, keyPromise, canonicalRequest) { + const stringToSign = await this.createStringToSign(longDate, credentialScope, canonicalRequest); + const hash = new this.sha256(await keyPromise); + hash.update((0, import_util_utf84.toUint8Array)(stringToSign)); + return (0, import_util_hex_encoding.toHex)(await hash.digest()); + } + getSigningKey(credentials, region, shortDate, service) { + return getSigningKey(this.sha256, credentials, shortDate, region, service || this.service); + } + validateResolvedCredentials(credentials) { + if (typeof credentials !== "object" || // @ts-expect-error: Property 'accessKeyId' does not exist on type 'object'.ts(2339) + typeof credentials.accessKeyId !== "string" || // @ts-expect-error: Property 'secretAccessKey' does not exist on type 'object'.ts(2339) + typeof credentials.secretAccessKey !== "string") { + throw new Error("Resolved credential object is not valid"); + } + } +}; +__name(_SignatureV4, "SignatureV4"); +var SignatureV4 = _SignatureV4; +var formatDate = /* @__PURE__ */ __name((now) => { + const longDate = iso8601(now).replace(/[\-:]/g, ""); + return { + longDate, + shortDate: longDate.slice(0, 8) + }; +}, "formatDate"); +var getCanonicalHeaderList = /* @__PURE__ */ __name((headers) => Object.keys(headers).sort().join(";"), "getCanonicalHeaderList"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 1411: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Client: () => Client, + Command: () => Command, + LazyJsonString: () => LazyJsonString, + NoOpLogger: () => NoOpLogger, + SENSITIVE_STRING: () => SENSITIVE_STRING, + ServiceException: () => ServiceException, + _json: () => _json, + collectBody: () => import_protocols.collectBody, + convertMap: () => convertMap, + createAggregatedClient: () => createAggregatedClient, + dateToUtcString: () => dateToUtcString, + decorateServiceException: () => decorateServiceException, + emitWarningIfUnsupportedVersion: () => emitWarningIfUnsupportedVersion, + expectBoolean: () => expectBoolean, + expectByte: () => expectByte, + expectFloat32: () => expectFloat32, + expectInt: () => expectInt, + expectInt32: () => expectInt32, + expectLong: () => expectLong, + expectNonNull: () => expectNonNull, + expectNumber: () => expectNumber, + expectObject: () => expectObject, + expectShort: () => expectShort, + expectString: () => 
expectString, + expectUnion: () => expectUnion, + extendedEncodeURIComponent: () => import_protocols.extendedEncodeURIComponent, + getArrayIfSingleItem: () => getArrayIfSingleItem, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + getDefaultExtensionConfiguration: () => getDefaultExtensionConfiguration, + getValueFromTextNode: () => getValueFromTextNode, + handleFloat: () => handleFloat, + isSerializableHeaderValue: () => isSerializableHeaderValue, + limitedParseDouble: () => limitedParseDouble, + limitedParseFloat: () => limitedParseFloat, + limitedParseFloat32: () => limitedParseFloat32, + loadConfigsForDefaultMode: () => loadConfigsForDefaultMode, + logger: () => logger, + map: () => map, + parseBoolean: () => parseBoolean, + parseEpochTimestamp: () => parseEpochTimestamp, + parseRfc3339DateTime: () => parseRfc3339DateTime, + parseRfc3339DateTimeWithOffset: () => parseRfc3339DateTimeWithOffset, + parseRfc7231DateTime: () => parseRfc7231DateTime, + quoteHeader: () => quoteHeader, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig, + resolvedPath: () => import_protocols.resolvedPath, + serializeDateTime: () => serializeDateTime, + serializeFloat: () => serializeFloat, + splitEvery: () => splitEvery, + splitHeader: () => splitHeader, + strictParseByte: () => strictParseByte, + strictParseDouble: () => strictParseDouble, + strictParseFloat: () => strictParseFloat, + strictParseFloat32: () => strictParseFloat32, + strictParseInt: () => strictParseInt, + strictParseInt32: () => strictParseInt32, + strictParseLong: () => strictParseLong, + strictParseShort: () => strictParseShort, + take: () => take, + throwDefaultError: () => throwDefaultError, + withBaseException: () => withBaseException +}); +module.exports = __toCommonJS(src_exports); + +// src/client.ts +var import_middleware_stack = __nccwpck_require__(9208); +var _Client = class _Client { + constructor(config) { + this.config = config; + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + } + send(command, optionsOrCb, cb) { + const options = typeof optionsOrCb !== "function" ? optionsOrCb : void 0; + const callback = typeof optionsOrCb === "function" ? optionsOrCb : cb; + const useHandlerCache = options === void 0 && this.config.cacheMiddleware === true; + let handler; + if (useHandlerCache) { + if (!this.handlers) { + this.handlers = /* @__PURE__ */ new WeakMap(); + } + const handlers = this.handlers; + if (handlers.has(command.constructor)) { + handler = handlers.get(command.constructor); + } else { + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + handlers.set(command.constructor, handler); + } + } else { + delete this.handlers; + handler = command.resolveMiddleware(this.middlewareStack, this.config, options); + } + if (callback) { + handler(command).then( + (result) => callback(null, result.output), + (err) => callback(err) + ).catch( + // prevent any errors thrown in the callback from triggering an + // unhandled promise rejection + () => { + } + ); + } else { + return handler(command).then((result) => result.output); + } + } + destroy() { + var _a, _b, _c; + (_c = (_b = (_a = this.config) == null ? void 0 : _a.requestHandler) == null ? void 0 : _b.destroy) == null ? 
void 0 : _c.call(_b); + delete this.handlers; + } +}; +__name(_Client, "Client"); +var Client = _Client; + +// src/collect-stream-body.ts +var import_protocols = __nccwpck_require__(3422); + +// src/command.ts + +var import_types = __nccwpck_require__(690); +var _Command = class _Command { + constructor() { + this.middlewareStack = (0, import_middleware_stack.constructStack)(); + } + /** + * Factory for Command ClassBuilder. + * @internal + */ + static classBuilder() { + return new ClassBuilder(); + } + /** + * @internal + */ + resolveMiddlewareWithContext(clientStack, configuration, options, { + middlewareFn, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + smithyContext, + additionalContext, + CommandCtor + }) { + for (const mw of middlewareFn.bind(this)(CommandCtor, clientStack, configuration, options)) { + this.middlewareStack.use(mw); + } + const stack = clientStack.concat(this.middlewareStack); + const { logger: logger2 } = configuration; + const handlerExecutionContext = { + logger: logger2, + clientName, + commandName, + inputFilterSensitiveLog, + outputFilterSensitiveLog, + [import_types.SMITHY_CONTEXT_KEY]: { + commandInstance: this, + ...smithyContext + }, + ...additionalContext + }; + const { requestHandler } = configuration; + return stack.resolve( + (request) => requestHandler.handle(request.request, options || {}), + handlerExecutionContext + ); + } +}; +__name(_Command, "Command"); +var Command = _Command; +var _ClassBuilder = class _ClassBuilder { + constructor() { + this._init = () => { + }; + this._ep = {}; + this._middlewareFn = () => []; + this._commandName = ""; + this._clientName = ""; + this._additionalContext = {}; + this._smithyContext = {}; + this._inputFilterSensitiveLog = (_) => _; + this._outputFilterSensitiveLog = (_) => _; + this._serializer = null; + this._deserializer = null; + } + /** + * Optional init callback. + */ + init(cb) { + this._init = cb; + } + /** + * Set the endpoint parameter instructions. + */ + ep(endpointParameterInstructions) { + this._ep = endpointParameterInstructions; + return this; + } + /** + * Add any number of middleware. + */ + m(middlewareSupplier) { + this._middlewareFn = middlewareSupplier; + return this; + } + /** + * Set the initial handler execution context Smithy field. + */ + s(service, operation, smithyContext = {}) { + this._smithyContext = { + service, + operation, + ...smithyContext + }; + return this; + } + /** + * Set the initial handler execution context. + */ + c(additionalContext = {}) { + this._additionalContext = additionalContext; + return this; + } + /** + * Set constant string identifiers for the operation. + */ + n(clientName, commandName) { + this._clientName = clientName; + this._commandName = commandName; + return this; + } + /** + * Set the input and output sensistive log filters. + */ + f(inputFilter = (_) => _, outputFilter = (_) => _) { + this._inputFilterSensitiveLog = inputFilter; + this._outputFilterSensitiveLog = outputFilter; + return this; + } + /** + * Sets the serializer. + */ + ser(serializer) { + this._serializer = serializer; + return this; + } + /** + * Sets the deserializer. + */ + de(deserializer) { + this._deserializer = deserializer; + return this; + } + /** + * @returns a Command class with the classBuilder properties. 
+ */ + build() { + var _a; + const closure = this; + let CommandRef; + return CommandRef = (_a = class extends Command { + /** + * @public + */ + constructor(...[input]) { + super(); + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.serialize = closure._serializer; + /** + * @internal + */ + // @ts-ignore used in middlewareFn closure. + this.deserialize = closure._deserializer; + this.input = input ?? {}; + closure._init(this); + } + /** + * @public + */ + static getEndpointParameterInstructions() { + return closure._ep; + } + /** + * @internal + */ + resolveMiddleware(stack, configuration, options) { + return this.resolveMiddlewareWithContext(stack, configuration, options, { + CommandCtor: CommandRef, + middlewareFn: closure._middlewareFn, + clientName: closure._clientName, + commandName: closure._commandName, + inputFilterSensitiveLog: closure._inputFilterSensitiveLog, + outputFilterSensitiveLog: closure._outputFilterSensitiveLog, + smithyContext: closure._smithyContext, + additionalContext: closure._additionalContext + }); + } + }, __name(_a, "CommandRef"), _a); + } +}; +__name(_ClassBuilder, "ClassBuilder"); +var ClassBuilder = _ClassBuilder; + +// src/constants.ts +var SENSITIVE_STRING = "***SensitiveInformation***"; + +// src/create-aggregated-client.ts +var createAggregatedClient = /* @__PURE__ */ __name((commands, Client2) => { + for (const command of Object.keys(commands)) { + const CommandCtor = commands[command]; + const methodImpl = /* @__PURE__ */ __name(async function(args, optionsOrCb, cb) { + const command2 = new CommandCtor(args); + if (typeof optionsOrCb === "function") { + this.send(command2, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") + throw new Error(`Expected http options but got ${typeof optionsOrCb}`); + this.send(command2, optionsOrCb || {}, cb); + } else { + return this.send(command2, optionsOrCb); + } + }, "methodImpl"); + const methodName = (command[0].toLowerCase() + command.slice(1)).replace(/Command$/, ""); + Client2.prototype[methodName] = methodImpl; + } +}, "createAggregatedClient"); + +// src/parse-utils.ts +var parseBoolean = /* @__PURE__ */ __name((value) => { + switch (value) { + case "true": + return true; + case "false": + return false; + default: + throw new Error(`Unable to parse boolean value "${value}"`); + } +}, "parseBoolean"); +var expectBoolean = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "number") { + if (value === 0 || value === 1) { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (value === 0) { + return false; + } + if (value === 1) { + return true; + } + } + if (typeof value === "string") { + const lower = value.toLowerCase(); + if (lower === "false" || lower === "true") { + logger.warn(stackTraceWarning(`Expected boolean, got ${typeof value}: ${value}`)); + } + if (lower === "false") { + return false; + } + if (lower === "true") { + return true; + } + } + if (typeof value === "boolean") { + return value; + } + throw new TypeError(`Expected boolean, got ${typeof value}: ${value}`); +}, "expectBoolean"); +var expectNumber = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + const parsed = parseFloat(value); + if (!Number.isNaN(parsed)) { + if (String(parsed) !== String(value)) { + logger.warn(stackTraceWarning(`Expected number but observed string: 
${value}`)); + } + return parsed; + } + } + if (typeof value === "number") { + return value; + } + throw new TypeError(`Expected number, got ${typeof value}: ${value}`); +}, "expectNumber"); +var MAX_FLOAT = Math.ceil(2 ** 127 * (2 - 2 ** -23)); +var expectFloat32 = /* @__PURE__ */ __name((value) => { + const expected = expectNumber(value); + if (expected !== void 0 && !Number.isNaN(expected) && expected !== Infinity && expected !== -Infinity) { + if (Math.abs(expected) > MAX_FLOAT) { + throw new TypeError(`Expected 32-bit float, got ${value}`); + } + } + return expected; +}, "expectFloat32"); +var expectLong = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (Number.isInteger(value) && !Number.isNaN(value)) { + return value; + } + throw new TypeError(`Expected integer, got ${typeof value}: ${value}`); +}, "expectLong"); +var expectInt = expectLong; +var expectInt32 = /* @__PURE__ */ __name((value) => expectSizedInt(value, 32), "expectInt32"); +var expectShort = /* @__PURE__ */ __name((value) => expectSizedInt(value, 16), "expectShort"); +var expectByte = /* @__PURE__ */ __name((value) => expectSizedInt(value, 8), "expectByte"); +var expectSizedInt = /* @__PURE__ */ __name((value, size) => { + const expected = expectLong(value); + if (expected !== void 0 && castInt(expected, size) !== expected) { + throw new TypeError(`Expected ${size}-bit integer, got ${value}`); + } + return expected; +}, "expectSizedInt"); +var castInt = /* @__PURE__ */ __name((value, size) => { + switch (size) { + case 32: + return Int32Array.of(value)[0]; + case 16: + return Int16Array.of(value)[0]; + case 8: + return Int8Array.of(value)[0]; + } +}, "castInt"); +var expectNonNull = /* @__PURE__ */ __name((value, location) => { + if (value === null || value === void 0) { + if (location) { + throw new TypeError(`Expected a non-null value for ${location}`); + } + throw new TypeError("Expected a non-null value"); + } + return value; +}, "expectNonNull"); +var expectObject = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "object" && !Array.isArray(value)) { + return value; + } + const receivedType = Array.isArray(value) ? "array" : typeof value; + throw new TypeError(`Expected object, got ${receivedType}: ${value}`); +}, "expectObject"); +var expectString = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value === "string") { + return value; + } + if (["boolean", "number", "bigint"].includes(typeof value)) { + logger.warn(stackTraceWarning(`Expected string, got ${typeof value}: ${value}`)); + return String(value); + } + throw new TypeError(`Expected string, got ${typeof value}: ${value}`); +}, "expectString"); +var expectUnion = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + const asObject = expectObject(value); + const setKeys = Object.entries(asObject).filter(([, v]) => v != null).map(([k]) => k); + if (setKeys.length === 0) { + throw new TypeError(`Unions must have exactly one non-null member. None were found.`); + } + if (setKeys.length > 1) { + throw new TypeError(`Unions must have exactly one non-null member. 
Keys ${setKeys} were not null.`); + } + return asObject; +}, "expectUnion"); +var strictParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectNumber(parseNumber(value)); + } + return expectNumber(value); +}, "strictParseDouble"); +var strictParseFloat = strictParseDouble; +var strictParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return expectFloat32(parseNumber(value)); + } + return expectFloat32(value); +}, "strictParseFloat32"); +var NUMBER_REGEX = /(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)|(-?Infinity)|(NaN)/g; +var parseNumber = /* @__PURE__ */ __name((value) => { + const matches = value.match(NUMBER_REGEX); + if (matches === null || matches[0].length !== value.length) { + throw new TypeError(`Expected real number, got implicit NaN`); + } + return parseFloat(value); +}, "parseNumber"); +var limitedParseDouble = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectNumber(value); +}, "limitedParseDouble"); +var handleFloat = limitedParseDouble; +var limitedParseFloat = limitedParseDouble; +var limitedParseFloat32 = /* @__PURE__ */ __name((value) => { + if (typeof value == "string") { + return parseFloatString(value); + } + return expectFloat32(value); +}, "limitedParseFloat32"); +var parseFloatString = /* @__PURE__ */ __name((value) => { + switch (value) { + case "NaN": + return NaN; + case "Infinity": + return Infinity; + case "-Infinity": + return -Infinity; + default: + throw new Error(`Unable to parse float value: ${value}`); + } +}, "parseFloatString"); +var strictParseLong = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectLong(parseNumber(value)); + } + return expectLong(value); +}, "strictParseLong"); +var strictParseInt = strictParseLong; +var strictParseInt32 = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectInt32(parseNumber(value)); + } + return expectInt32(value); +}, "strictParseInt32"); +var strictParseShort = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectShort(parseNumber(value)); + } + return expectShort(value); +}, "strictParseShort"); +var strictParseByte = /* @__PURE__ */ __name((value) => { + if (typeof value === "string") { + return expectByte(parseNumber(value)); + } + return expectByte(value); +}, "strictParseByte"); +var stackTraceWarning = /* @__PURE__ */ __name((message) => { + return String(new TypeError(message).stack || message).split("\n").slice(0, 5).filter((s) => !s.includes("stackTraceWarning")).join("\n"); +}, "stackTraceWarning"); +var logger = { + warn: console.warn +}; + +// src/date-utils.ts +var DAYS = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]; +var MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; +function dateToUtcString(date) { + const year = date.getUTCFullYear(); + const month = date.getUTCMonth(); + const dayOfWeek = date.getUTCDay(); + const dayOfMonthInt = date.getUTCDate(); + const hoursInt = date.getUTCHours(); + const minutesInt = date.getUTCMinutes(); + const secondsInt = date.getUTCSeconds(); + const dayOfMonthString = dayOfMonthInt < 10 ? `0${dayOfMonthInt}` : `${dayOfMonthInt}`; + const hoursString = hoursInt < 10 ? `0${hoursInt}` : `${hoursInt}`; + const minutesString = minutesInt < 10 ? `0${minutesInt}` : `${minutesInt}`; + const secondsString = secondsInt < 10 ? 
`0${secondsInt}` : `${secondsInt}`; + return `${DAYS[dayOfWeek]}, ${dayOfMonthString} ${MONTHS[month]} ${year} ${hoursString}:${minutesString}:${secondsString} GMT`; +} +__name(dateToUtcString, "dateToUtcString"); +var RFC3339 = new RegExp(/^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?[zZ]$/); +var parseRfc3339DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + return buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); +}, "parseRfc3339DateTime"); +var RFC3339_WITH_OFFSET = new RegExp( + /^(\d{4})-(\d{2})-(\d{2})[tT](\d{2}):(\d{2}):(\d{2})(?:\.(\d+))?(([-+]\d{2}\:\d{2})|[zZ])$/ +); +var parseRfc3339DateTimeWithOffset = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-3339 date-times must be expressed as strings"); + } + const match = RFC3339_WITH_OFFSET.exec(value); + if (!match) { + throw new TypeError("Invalid RFC-3339 date-time value"); + } + const [_, yearStr, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, offsetStr] = match; + const year = strictParseShort(stripLeadingZeroes(yearStr)); + const month = parseDateValue(monthStr, "month", 1, 12); + const day = parseDateValue(dayStr, "day", 1, 31); + const date = buildDate(year, month, day, { hours, minutes, seconds, fractionalMilliseconds }); + if (offsetStr.toUpperCase() != "Z") { + date.setTime(date.getTime() - parseOffsetToMilliseconds(offsetStr)); + } + return date; +}, "parseRfc3339DateTimeWithOffset"); +var IMF_FIXDATE = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), (\d{2}) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) (\d{4}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var RFC_850_DATE = new RegExp( + /^(?:Monday|Tuesday|Wednesday|Thursday|Friday|Saturday|Sunday), (\d{2})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? GMT$/ +); +var ASC_TIME = new RegExp( + /^(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ( [1-9]|\d{2}) (\d{1,2}):(\d{2}):(\d{2})(?:\.(\d+))? 
(\d{4})$/ +); +var parseRfc7231DateTime = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + if (typeof value !== "string") { + throw new TypeError("RFC-7231 date-times must be expressed as strings"); + } + let match = IMF_FIXDATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr, "day", 1, 31), + { hours, minutes, seconds, fractionalMilliseconds } + ); + } + match = RFC_850_DATE.exec(value); + if (match) { + const [_, dayStr, monthStr, yearStr, hours, minutes, seconds, fractionalMilliseconds] = match; + return adjustRfc850Year( + buildDate(parseTwoDigitYear(yearStr), parseMonthByShortName(monthStr), parseDateValue(dayStr, "day", 1, 31), { + hours, + minutes, + seconds, + fractionalMilliseconds + }) + ); + } + match = ASC_TIME.exec(value); + if (match) { + const [_, monthStr, dayStr, hours, minutes, seconds, fractionalMilliseconds, yearStr] = match; + return buildDate( + strictParseShort(stripLeadingZeroes(yearStr)), + parseMonthByShortName(monthStr), + parseDateValue(dayStr.trimLeft(), "day", 1, 31), + { hours, minutes, seconds, fractionalMilliseconds } + ); + } + throw new TypeError("Invalid RFC-7231 date-time value"); +}, "parseRfc7231DateTime"); +var parseEpochTimestamp = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return void 0; + } + let valueAsDouble; + if (typeof value === "number") { + valueAsDouble = value; + } else if (typeof value === "string") { + valueAsDouble = strictParseDouble(value); + } else if (typeof value === "object" && value.tag === 1) { + valueAsDouble = value.value; + } else { + throw new TypeError("Epoch timestamps must be expressed as floating point numbers or their string representation"); + } + if (Number.isNaN(valueAsDouble) || valueAsDouble === Infinity || valueAsDouble === -Infinity) { + throw new TypeError("Epoch timestamps must be valid, non-Infinite, non-NaN numerics"); + } + return new Date(Math.round(valueAsDouble * 1e3)); +}, "parseEpochTimestamp"); +var buildDate = /* @__PURE__ */ __name((year, month, day, time) => { + const adjustedMonth = month - 1; + validateDayOfMonth(year, adjustedMonth, day); + return new Date( + Date.UTC( + year, + adjustedMonth, + day, + parseDateValue(time.hours, "hour", 0, 23), + parseDateValue(time.minutes, "minute", 0, 59), + // seconds can go up to 60 for leap seconds + parseDateValue(time.seconds, "seconds", 0, 60), + parseMilliseconds(time.fractionalMilliseconds) + ) + ); +}, "buildDate"); +var parseTwoDigitYear = /* @__PURE__ */ __name((value) => { + const thisYear = (/* @__PURE__ */ new Date()).getUTCFullYear(); + const valueInThisCentury = Math.floor(thisYear / 100) * 100 + strictParseShort(stripLeadingZeroes(value)); + if (valueInThisCentury < thisYear) { + return valueInThisCentury + 100; + } + return valueInThisCentury; +}, "parseTwoDigitYear"); +var FIFTY_YEARS_IN_MILLIS = 50 * 365 * 24 * 60 * 60 * 1e3; +var adjustRfc850Year = /* @__PURE__ */ __name((input) => { + if (input.getTime() - (/* @__PURE__ */ new Date()).getTime() > FIFTY_YEARS_IN_MILLIS) { + return new Date( + Date.UTC( + input.getUTCFullYear() - 100, + input.getUTCMonth(), + input.getUTCDate(), + input.getUTCHours(), + input.getUTCMinutes(), + input.getUTCSeconds(), + input.getUTCMilliseconds() + ) + ); + } + return input; +}, "adjustRfc850Year"); +var 
parseMonthByShortName = /* @__PURE__ */ __name((value) => { + const monthIdx = MONTHS.indexOf(value); + if (monthIdx < 0) { + throw new TypeError(`Invalid month: ${value}`); + } + return monthIdx + 1; +}, "parseMonthByShortName"); +var DAYS_IN_MONTH = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]; +var validateDayOfMonth = /* @__PURE__ */ __name((year, month, day) => { + let maxDays = DAYS_IN_MONTH[month]; + if (month === 1 && isLeapYear(year)) { + maxDays = 29; + } + if (day > maxDays) { + throw new TypeError(`Invalid day for ${MONTHS[month]} in ${year}: ${day}`); + } +}, "validateDayOfMonth"); +var isLeapYear = /* @__PURE__ */ __name((year) => { + return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0); +}, "isLeapYear"); +var parseDateValue = /* @__PURE__ */ __name((value, type, lower, upper) => { + const dateVal = strictParseByte(stripLeadingZeroes(value)); + if (dateVal < lower || dateVal > upper) { + throw new TypeError(`${type} must be between ${lower} and ${upper}, inclusive`); + } + return dateVal; +}, "parseDateValue"); +var parseMilliseconds = /* @__PURE__ */ __name((value) => { + if (value === null || value === void 0) { + return 0; + } + return strictParseFloat32("0." + value) * 1e3; +}, "parseMilliseconds"); +var parseOffsetToMilliseconds = /* @__PURE__ */ __name((value) => { + const directionStr = value[0]; + let direction = 1; + if (directionStr == "+") { + direction = 1; + } else if (directionStr == "-") { + direction = -1; + } else { + throw new TypeError(`Offset direction, ${directionStr}, must be "+" or "-"`); + } + const hour = Number(value.substring(1, 3)); + const minute = Number(value.substring(4, 6)); + return direction * (hour * 60 + minute) * 60 * 1e3; +}, "parseOffsetToMilliseconds"); +var stripLeadingZeroes = /* @__PURE__ */ __name((value) => { + let idx = 0; + while (idx < value.length - 1 && value.charAt(idx) === "0") { + idx++; + } + if (idx === 0) { + return value; + } + return value.slice(idx); +}, "stripLeadingZeroes"); + +// src/exceptions.ts +var _ServiceException = class _ServiceException extends Error { + constructor(options) { + super(options.message); + Object.setPrototypeOf(this, _ServiceException.prototype); + this.name = options.name; + this.$fault = options.$fault; + this.$metadata = options.$metadata; + } +}; +__name(_ServiceException, "ServiceException"); +var ServiceException = _ServiceException; +var decorateServiceException = /* @__PURE__ */ __name((exception, additions = {}) => { + Object.entries(additions).filter(([, v]) => v !== void 0).forEach(([k, v]) => { + if (exception[k] == void 0 || exception[k] === "") { + exception[k] = v; + } + }); + const message = exception.message || exception.Message || "UnknownError"; + exception.message = message; + delete exception.Message; + return exception; +}, "decorateServiceException"); + +// src/default-error-handler.ts +var throwDefaultError = /* @__PURE__ */ __name(({ output, parsedBody, exceptionCtor, errorCode }) => { + const $metadata = deserializeMetadata(output); + const statusCode = $metadata.httpStatusCode ? $metadata.httpStatusCode + "" : void 0; + const response = new exceptionCtor({ + name: (parsedBody == null ? void 0 : parsedBody.code) || (parsedBody == null ? 
void 0 : parsedBody.Code) || errorCode || statusCode || "UnknownError", + $fault: "client", + $metadata + }); + throw decorateServiceException(response, parsedBody); +}, "throwDefaultError"); +var withBaseException = /* @__PURE__ */ __name((ExceptionCtor) => { + return ({ output, parsedBody, errorCode }) => { + throwDefaultError({ output, parsedBody, exceptionCtor: ExceptionCtor, errorCode }); + }; +}, "withBaseException"); +var deserializeMetadata = /* @__PURE__ */ __name((output) => ({ + httpStatusCode: output.statusCode, + requestId: output.headers["x-amzn-requestid"] ?? output.headers["x-amzn-request-id"] ?? output.headers["x-amz-request-id"], + extendedRequestId: output.headers["x-amz-id-2"], + cfId: output.headers["x-amz-cf-id"] +}), "deserializeMetadata"); + +// src/defaults-mode.ts +var loadConfigsForDefaultMode = /* @__PURE__ */ __name((mode) => { + switch (mode) { + case "standard": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "in-region": + return { + retryMode: "standard", + connectionTimeout: 1100 + }; + case "cross-region": + return { + retryMode: "standard", + connectionTimeout: 3100 + }; + case "mobile": + return { + retryMode: "standard", + connectionTimeout: 3e4 + }; + default: + return {}; + } +}, "loadConfigsForDefaultMode"); + +// src/emitWarningIfUnsupportedVersion.ts +var warningEmitted = false; +var emitWarningIfUnsupportedVersion = /* @__PURE__ */ __name((version) => { + if (version && !warningEmitted && parseInt(version.substring(1, version.indexOf("."))) < 16) { + warningEmitted = true; + } +}, "emitWarningIfUnsupportedVersion"); + +// src/extended-encode-uri-component.ts + + +// src/extensions/checksum.ts + +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + for (const id in import_types.AlgorithmId) { + const algorithmId = import_types.AlgorithmId[id]; + if (runtimeConfig[algorithmId] === void 0) { + continue; + } + checksumAlgorithms.push({ + algorithmId: () => algorithmId, + checksumConstructor: () => runtimeConfig[algorithmId] + }); + } + return { + _checksumAlgorithms: checksumAlgorithms, + addChecksumAlgorithm(algo) { + this._checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return this._checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// src/extensions/retry.ts +var getRetryConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + let _retryStrategy = runtimeConfig.retryStrategy; + return { + setRetryStrategy(retryStrategy) { + _retryStrategy = retryStrategy; + }, + retryStrategy() { + return _retryStrategy; + } + }; +}, "getRetryConfiguration"); +var resolveRetryRuntimeConfig = /* @__PURE__ */ __name((retryStrategyConfiguration) => { + const runtimeConfig = {}; + runtimeConfig.retryStrategy = retryStrategyConfiguration.retryStrategy(); + return runtimeConfig; +}, "resolveRetryRuntimeConfig"); + +// src/extensions/defaultExtensionConfiguration.ts +var getDefaultExtensionConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + ...getChecksumConfiguration(runtimeConfig), + ...getRetryConfiguration(runtimeConfig) + }; +}, "getDefaultExtensionConfiguration"); +var getDefaultClientConfiguration = 
getDefaultExtensionConfiguration; +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + ...resolveChecksumRuntimeConfig(config), + ...resolveRetryRuntimeConfig(config) + }; +}, "resolveDefaultRuntimeConfig"); + +// src/get-array-if-single-item.ts +var getArrayIfSingleItem = /* @__PURE__ */ __name((mayBeArray) => Array.isArray(mayBeArray) ? mayBeArray : [mayBeArray], "getArrayIfSingleItem"); + +// src/get-value-from-text-node.ts +var getValueFromTextNode = /* @__PURE__ */ __name((obj) => { + const textNodeName = "#text"; + for (const key in obj) { + if (obj.hasOwnProperty(key) && obj[key][textNodeName] !== void 0) { + obj[key] = obj[key][textNodeName]; + } else if (typeof obj[key] === "object" && obj[key] !== null) { + obj[key] = getValueFromTextNode(obj[key]); + } + } + return obj; +}, "getValueFromTextNode"); + +// src/is-serializable-header-value.ts +var isSerializableHeaderValue = /* @__PURE__ */ __name((value) => { + return value != null; +}, "isSerializableHeaderValue"); + +// src/lazy-json.ts +var LazyJsonString = /* @__PURE__ */ __name(function LazyJsonString2(val) { + const str = Object.assign(new String(val), { + deserializeJSON() { + return JSON.parse(String(val)); + }, + toString() { + return String(val); + }, + toJSON() { + return String(val); + } + }); + return str; +}, "LazyJsonString"); +LazyJsonString.from = (object) => { + if (object && typeof object === "object" && (object instanceof LazyJsonString || "deserializeJSON" in object)) { + return object; + } else if (typeof object === "string" || Object.getPrototypeOf(object) === String.prototype) { + return LazyJsonString(String(object)); + } + return LazyJsonString(JSON.stringify(object)); +}; +LazyJsonString.fromObject = LazyJsonString.from; + +// src/NoOpLogger.ts +var _NoOpLogger = class _NoOpLogger { + trace() { + } + debug() { + } + info() { + } + warn() { + } + error() { + } +}; +__name(_NoOpLogger, "NoOpLogger"); +var NoOpLogger = _NoOpLogger; + +// src/object-mapping.ts +function map(arg0, arg1, arg2) { + let target; + let filter; + let instructions; + if (typeof arg1 === "undefined" && typeof arg2 === "undefined") { + target = {}; + instructions = arg0; + } else { + target = arg0; + if (typeof arg1 === "function") { + filter = arg1; + instructions = arg2; + return mapWithFilter(target, filter, instructions); + } else { + instructions = arg1; + } + } + for (const key of Object.keys(instructions)) { + if (!Array.isArray(instructions[key])) { + target[key] = instructions[key]; + continue; + } + applyInstruction(target, null, instructions, key); + } + return target; +} +__name(map, "map"); +var convertMap = /* @__PURE__ */ __name((target) => { + const output = {}; + for (const [k, v] of Object.entries(target || {})) { + output[k] = [, v]; + } + return output; +}, "convertMap"); +var take = /* @__PURE__ */ __name((source, instructions) => { + const out = {}; + for (const key in instructions) { + applyInstruction(out, source, instructions, key); + } + return out; +}, "take"); +var mapWithFilter = /* @__PURE__ */ __name((target, filter, instructions) => { + return map( + target, + Object.entries(instructions).reduce( + (_instructions, [key, value]) => { + if (Array.isArray(value)) { + _instructions[key] = value; + } else { + if (typeof value === "function") { + _instructions[key] = [filter, value()]; + } else { + _instructions[key] = [filter, value]; + } + } + return _instructions; + }, + {} + ) + ); +}, "mapWithFilter"); +var applyInstruction = /* @__PURE__ */ __name((target, source, 
instructions, targetKey) => { + if (source !== null) { + let instruction = instructions[targetKey]; + if (typeof instruction === "function") { + instruction = [, instruction]; + } + const [filter2 = nonNullish, valueFn = pass, sourceKey = targetKey] = instruction; + if (typeof filter2 === "function" && filter2(source[sourceKey]) || typeof filter2 !== "function" && !!filter2) { + target[targetKey] = valueFn(source[sourceKey]); + } + return; + } + let [filter, value] = instructions[targetKey]; + if (typeof value === "function") { + let _value; + const defaultFilterPassed = filter === void 0 && (_value = value()) != null; + const customFilterPassed = typeof filter === "function" && !!filter(void 0) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed) { + target[targetKey] = _value; + } else if (customFilterPassed) { + target[targetKey] = value(); + } + } else { + const defaultFilterPassed = filter === void 0 && value != null; + const customFilterPassed = typeof filter === "function" && !!filter(value) || typeof filter !== "function" && !!filter; + if (defaultFilterPassed || customFilterPassed) { + target[targetKey] = value; + } + } +}, "applyInstruction"); +var nonNullish = /* @__PURE__ */ __name((_) => _ != null, "nonNullish"); +var pass = /* @__PURE__ */ __name((_) => _, "pass"); + +// src/quote-header.ts +function quoteHeader(part) { + if (part.includes(",") || part.includes('"')) { + part = `"${part.replace(/"/g, '\\"')}"`; + } + return part; +} +__name(quoteHeader, "quoteHeader"); + +// src/resolve-path.ts + + +// src/ser-utils.ts +var serializeFloat = /* @__PURE__ */ __name((value) => { + if (value !== value) { + return "NaN"; + } + switch (value) { + case Infinity: + return "Infinity"; + case -Infinity: + return "-Infinity"; + default: + return value; + } +}, "serializeFloat"); +var serializeDateTime = /* @__PURE__ */ __name((date) => date.toISOString().replace(".000Z", "Z"), "serializeDateTime"); + +// src/serde-json.ts +var _json = /* @__PURE__ */ __name((obj) => { + if (obj == null) { + return {}; + } + if (Array.isArray(obj)) { + return obj.filter((_) => _ != null).map(_json); + } + if (typeof obj === "object") { + const target = {}; + for (const key of Object.keys(obj)) { + if (obj[key] == null) { + continue; + } + target[key] = _json(obj[key]); + } + return target; + } + return obj; +}, "_json"); + +// src/split-every.ts +function splitEvery(value, delimiter, numDelimiters) { + if (numDelimiters <= 0 || !Number.isInteger(numDelimiters)) { + throw new Error("Invalid number of delimiters (" + numDelimiters + ") for splitEvery."); + } + const segments = value.split(delimiter); + if (numDelimiters === 1) { + return segments; + } + const compoundSegments = []; + let currentSegment = ""; + for (let i = 0; i < segments.length; i++) { + if (currentSegment === "") { + currentSegment = segments[i]; + } else { + currentSegment += delimiter + segments[i]; + } + if ((i + 1) % numDelimiters === 0) { + compoundSegments.push(currentSegment); + currentSegment = ""; + } + } + if (currentSegment !== "") { + compoundSegments.push(currentSegment); + } + return compoundSegments; +} +__name(splitEvery, "splitEvery"); + +// src/split-header.ts +var splitHeader = /* @__PURE__ */ __name((value) => { + const z = value.length; + const values = []; + let withinQuotes = false; + let prevChar = void 0; + let anchor = 0; + for (let i = 0; i < z; ++i) { + const char = value[i]; + switch (char) { + case `"`: + if (prevChar !== "\\") { + withinQuotes = !withinQuotes; + } + break; + case ",": + 
if (!withinQuotes) { + values.push(value.slice(anchor, i)); + anchor = i + 1; + } + break; + default: + } + prevChar = char; + } + values.push(value.slice(anchor)); + return values.map((v) => { + v = v.trim(); + const z2 = v.length; + if (z2 < 2) { + return v; + } + if (v[0] === `"` && v[z2 - 1] === `"`) { + v = v.slice(1, z2 - 1); + } + return v.replace(/\\"/g, '"'); + }); +}, "splitHeader"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 690: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AlgorithmId: () => AlgorithmId, + EndpointURLScheme: () => EndpointURLScheme, + FieldPosition: () => FieldPosition, + HttpApiKeyAuthLocation: () => HttpApiKeyAuthLocation, + HttpAuthLocation: () => HttpAuthLocation, + IniSectionType: () => IniSectionType, + RequestHandlerProtocol: () => RequestHandlerProtocol, + SMITHY_CONTEXT_KEY: () => SMITHY_CONTEXT_KEY, + getDefaultClientConfiguration: () => getDefaultClientConfiguration, + resolveDefaultRuntimeConfig: () => resolveDefaultRuntimeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/auth/auth.ts +var HttpAuthLocation = /* @__PURE__ */ ((HttpAuthLocation2) => { + HttpAuthLocation2["HEADER"] = "header"; + HttpAuthLocation2["QUERY"] = "query"; + return HttpAuthLocation2; +})(HttpAuthLocation || {}); + +// src/auth/HttpApiKeyAuth.ts +var HttpApiKeyAuthLocation = /* @__PURE__ */ ((HttpApiKeyAuthLocation2) => { + HttpApiKeyAuthLocation2["HEADER"] = "header"; + HttpApiKeyAuthLocation2["QUERY"] = "query"; + return HttpApiKeyAuthLocation2; +})(HttpApiKeyAuthLocation || {}); + +// src/endpoint.ts +var EndpointURLScheme = /* @__PURE__ */ ((EndpointURLScheme2) => { + EndpointURLScheme2["HTTP"] = "http"; + EndpointURLScheme2["HTTPS"] = "https"; + return EndpointURLScheme2; +})(EndpointURLScheme || {}); + +// src/extensions/checksum.ts +var AlgorithmId = /* @__PURE__ */ ((AlgorithmId2) => { + AlgorithmId2["MD5"] = "md5"; + AlgorithmId2["CRC32"] = "crc32"; + AlgorithmId2["CRC32C"] = "crc32c"; + AlgorithmId2["SHA1"] = "sha1"; + AlgorithmId2["SHA256"] = "sha256"; + return AlgorithmId2; +})(AlgorithmId || {}); +var getChecksumConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + const checksumAlgorithms = []; + if (runtimeConfig.sha256 !== void 0) { + checksumAlgorithms.push({ + algorithmId: () => "sha256" /* SHA256 */, + checksumConstructor: () => runtimeConfig.sha256 + }); + } + if (runtimeConfig.md5 != void 0) { + checksumAlgorithms.push({ + algorithmId: () => "md5" /* MD5 */, + checksumConstructor: () => runtimeConfig.md5 + }); + } + return { + _checksumAlgorithms: checksumAlgorithms, + 
addChecksumAlgorithm(algo) { + this._checksumAlgorithms.push(algo); + }, + checksumAlgorithms() { + return this._checksumAlgorithms; + } + }; +}, "getChecksumConfiguration"); +var resolveChecksumRuntimeConfig = /* @__PURE__ */ __name((clientConfig) => { + const runtimeConfig = {}; + clientConfig.checksumAlgorithms().forEach((checksumAlgorithm) => { + runtimeConfig[checksumAlgorithm.algorithmId()] = checksumAlgorithm.checksumConstructor(); + }); + return runtimeConfig; +}, "resolveChecksumRuntimeConfig"); + +// src/extensions/defaultClientConfiguration.ts +var getDefaultClientConfiguration = /* @__PURE__ */ __name((runtimeConfig) => { + return { + ...getChecksumConfiguration(runtimeConfig) + }; +}, "getDefaultClientConfiguration"); +var resolveDefaultRuntimeConfig = /* @__PURE__ */ __name((config) => { + return { + ...resolveChecksumRuntimeConfig(config) + }; +}, "resolveDefaultRuntimeConfig"); + +// src/http.ts +var FieldPosition = /* @__PURE__ */ ((FieldPosition2) => { + FieldPosition2[FieldPosition2["HEADER"] = 0] = "HEADER"; + FieldPosition2[FieldPosition2["TRAILER"] = 1] = "TRAILER"; + return FieldPosition2; +})(FieldPosition || {}); + +// src/middleware.ts +var SMITHY_CONTEXT_KEY = "__smithy_context"; + +// src/profile.ts +var IniSectionType = /* @__PURE__ */ ((IniSectionType2) => { + IniSectionType2["PROFILE"] = "profile"; + IniSectionType2["SSO_SESSION"] = "sso-session"; + IniSectionType2["SERVICES"] = "services"; + return IniSectionType2; +})(IniSectionType || {}); + +// src/transfer.ts +var RequestHandlerProtocol = /* @__PURE__ */ ((RequestHandlerProtocol2) => { + RequestHandlerProtocol2["HTTP_0_9"] = "http/0.9"; + RequestHandlerProtocol2["HTTP_1_0"] = "http/1.0"; + RequestHandlerProtocol2["TDS_8_0"] = "tds/8.0"; + return RequestHandlerProtocol2; +})(RequestHandlerProtocol || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 4494: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + parseUrl: () => parseUrl +}); +module.exports = __toCommonJS(src_exports); +var import_querystring_parser = __nccwpck_require__(8822); +var parseUrl = /* @__PURE__ */ __name((url) => { + if (typeof url === "string") { + return parseUrl(new URL(url)); + } + const { hostname, pathname, port, protocol, search } = url; + let query; + if (search) { + query = (0, import_querystring_parser.parseQueryString)(search); + } + return { + hostname, + port: port ? 
parseInt(port) : void 0, + protocol, + path: pathname, + query + }; +}, "parseUrl"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 2674: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fromBase64 = void 0; +const util_buffer_from_1 = __nccwpck_require__(4151); +const BASE64_REGEX = /^[A-Za-z0-9+/]*={0,2}$/; +const fromBase64 = (input) => { + if ((input.length * 3) % 4 !== 0) { + throw new TypeError(`Incorrect padding on base64 string.`); + } + if (!BASE64_REGEX.exec(input)) { + throw new TypeError(`Invalid base64 string.`); + } + const buffer = (0, util_buffer_from_1.fromString)(input, "base64"); + return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength); +}; +exports.fromBase64 = fromBase64; + + +/***/ }), + +/***/ 8385: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +module.exports = __toCommonJS(src_exports); +__reExport(src_exports, __nccwpck_require__(2674), module.exports); +__reExport(src_exports, __nccwpck_require__(4871), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 4871: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toBase64 = void 0; +const util_buffer_from_1 = __nccwpck_require__(4151); +const util_utf8_1 = __nccwpck_require__(1577); +const toBase64 = (_input) => { + let input; + if (typeof _input === "string") { + input = (0, util_utf8_1.fromUtf8)(_input); + } + else { + input = _input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-base64: toBase64 encoder function only accepts string | Uint8Array."); + } + return (0, util_buffer_from_1.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("base64"); +}; +exports.toBase64 = toBase64; + + +/***/ }), + +/***/ 3638: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === 
"object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + calculateBodyLength: () => calculateBodyLength +}); +module.exports = __toCommonJS(src_exports); + +// src/calculateBodyLength.ts +var import_fs = __nccwpck_require__(9896); +var calculateBodyLength = /* @__PURE__ */ __name((body) => { + if (!body) { + return 0; + } + if (typeof body === "string") { + return Buffer.byteLength(body); + } else if (typeof body.byteLength === "number") { + return body.byteLength; + } else if (typeof body.size === "number") { + return body.size; + } else if (typeof body.start === "number" && typeof body.end === "number") { + return body.end + 1 - body.start; + } else if (typeof body.path === "string" || Buffer.isBuffer(body.path)) { + return (0, import_fs.lstatSync)(body.path).size; + } else if (typeof body.fd === "number") { + return (0, import_fs.fstatSync)(body.fd).size; + } + throw new Error(`Body Length computation failed for ${body}`); +}, "calculateBodyLength"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 4151: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromArrayBuffer: () => fromArrayBuffer, + fromString: () => fromString +}); +module.exports = __toCommonJS(src_exports); +var import_is_array_buffer = __nccwpck_require__(6130); +var import_buffer = __nccwpck_require__(181); +var fromArrayBuffer = /* @__PURE__ */ __name((input, offset = 0, length = input.byteLength - offset) => { + if (!(0, import_is_array_buffer.isArrayBuffer)(input)) { + throw new TypeError(`The "input" argument must be ArrayBuffer. Received type ${typeof input} (${input})`); + } + return import_buffer.Buffer.from(input, offset, length); +}, "fromArrayBuffer"); +var fromString = /* @__PURE__ */ __name((input, encoding) => { + if (typeof input !== "string") { + throw new TypeError(`The "input" argument must be of type string. Received type ${typeof input} (${input})`); + } + return encoding ? 
import_buffer.Buffer.from(input, encoding) : import_buffer.Buffer.from(input); +}, "fromString"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 6716: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + SelectorType: () => SelectorType, + booleanSelector: () => booleanSelector, + numberSelector: () => numberSelector +}); +module.exports = __toCommonJS(src_exports); + +// src/booleanSelector.ts +var booleanSelector = /* @__PURE__ */ __name((obj, key, type) => { + if (!(key in obj)) + return void 0; + if (obj[key] === "true") + return true; + if (obj[key] === "false") + return false; + throw new Error(`Cannot load ${type} "${key}". Expected "true" or "false", got ${obj[key]}.`); +}, "booleanSelector"); + +// src/numberSelector.ts +var numberSelector = /* @__PURE__ */ __name((obj, key, type) => { + if (!(key in obj)) + return void 0; + const numberValue = parseInt(obj[key], 10); + if (Number.isNaN(numberValue)) { + throw new TypeError(`Cannot load ${type} '${key}'. Expected number, got '${obj[key]}'.`); + } + return numberValue; +}, "numberSelector"); + +// src/types.ts +var SelectorType = /* @__PURE__ */ ((SelectorType2) => { + SelectorType2["ENV"] = "env"; + SelectorType2["CONFIG"] = "shared config entry"; + return SelectorType2; +})(SelectorType || {}); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 5435: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __create = Object.create; +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __getProtoOf = Object.getPrototypeOf; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toESM = (mod, isNodeMode, target) => (target = mod != null ? 
__create(__getProtoOf(mod)) : {}, __copyProps( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, + mod +)); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + resolveDefaultsModeConfig: () => resolveDefaultsModeConfig +}); +module.exports = __toCommonJS(src_exports); + +// src/resolveDefaultsModeConfig.ts +var import_config_resolver = __nccwpck_require__(9316); +var import_node_config_provider = __nccwpck_require__(5704); +var import_property_provider = __nccwpck_require__(1238); + +// src/constants.ts +var AWS_EXECUTION_ENV = "AWS_EXECUTION_ENV"; +var AWS_REGION_ENV = "AWS_REGION"; +var AWS_DEFAULT_REGION_ENV = "AWS_DEFAULT_REGION"; +var ENV_IMDS_DISABLED = "AWS_EC2_METADATA_DISABLED"; +var DEFAULTS_MODE_OPTIONS = ["in-region", "cross-region", "mobile", "standard", "legacy"]; +var IMDS_REGION_PATH = "/latest/meta-data/placement/region"; + +// src/defaultsModeConfig.ts +var AWS_DEFAULTS_MODE_ENV = "AWS_DEFAULTS_MODE"; +var AWS_DEFAULTS_MODE_CONFIG = "defaults_mode"; +var NODE_DEFAULTS_MODE_CONFIG_OPTIONS = { + environmentVariableSelector: (env) => { + return env[AWS_DEFAULTS_MODE_ENV]; + }, + configFileSelector: (profile) => { + return profile[AWS_DEFAULTS_MODE_CONFIG]; + }, + default: "legacy" +}; + +// src/resolveDefaultsModeConfig.ts +var resolveDefaultsModeConfig = /* @__PURE__ */ __name(({ + region = (0, import_node_config_provider.loadConfig)(import_config_resolver.NODE_REGION_CONFIG_OPTIONS), + defaultsMode = (0, import_node_config_provider.loadConfig)(NODE_DEFAULTS_MODE_CONFIG_OPTIONS) +} = {}) => (0, import_property_provider.memoize)(async () => { + const mode = typeof defaultsMode === "function" ? await defaultsMode() : defaultsMode; + switch (mode == null ? void 0 : mode.toLowerCase()) { + case "auto": + return resolveNodeDefaultsModeAuto(region); + case "in-region": + case "cross-region": + case "mobile": + case "standard": + case "legacy": + return Promise.resolve(mode == null ? void 0 : mode.toLocaleLowerCase()); + case void 0: + return Promise.resolve("legacy"); + default: + throw new Error( + `Invalid parameter for "defaultsMode", expect ${DEFAULTS_MODE_OPTIONS.join(", ")}, got ${mode}` + ); + } +}), "resolveDefaultsModeConfig"); +var resolveNodeDefaultsModeAuto = /* @__PURE__ */ __name(async (clientRegion) => { + if (clientRegion) { + const resolvedRegion = typeof clientRegion === "function" ? await clientRegion() : clientRegion; + const inferredRegion = await inferPhysicalRegion(); + if (!inferredRegion) { + return "standard"; + } + if (resolvedRegion === inferredRegion) { + return "in-region"; + } else { + return "cross-region"; + } + } + return "standard"; +}, "resolveNodeDefaultsModeAuto"); +var inferPhysicalRegion = /* @__PURE__ */ __name(async () => { + if (process.env[AWS_EXECUTION_ENV] && (process.env[AWS_REGION_ENV] || process.env[AWS_DEFAULT_REGION_ENV])) { + return process.env[AWS_REGION_ENV] ?? 
process.env[AWS_DEFAULT_REGION_ENV]; + } + if (!process.env[ENV_IMDS_DISABLED]) { + try { + const { getInstanceMetadataEndpoint, httpRequest } = await Promise.resolve().then(() => __toESM(__nccwpck_require__(566))); + const endpoint = await getInstanceMetadataEndpoint(); + return (await httpRequest({ ...endpoint, path: IMDS_REGION_PATH })).toString(); + } catch (e) { + } + } +}, "inferPhysicalRegion"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 9674: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + EndpointCache: () => EndpointCache, + EndpointError: () => EndpointError, + customEndpointFunctions: () => customEndpointFunctions, + isIpAddress: () => isIpAddress, + isValidHostLabel: () => isValidHostLabel, + resolveEndpoint: () => resolveEndpoint +}); +module.exports = __toCommonJS(src_exports); + +// src/cache/EndpointCache.ts +var _EndpointCache = class _EndpointCache { + /** + * @param [size] - desired average maximum capacity. A buffer of 10 additional keys will be allowed + * before keys are dropped. + * @param [params] - list of params to consider as part of the cache key. + * + * If the params list is not populated, no caching will happen. + * This may be out of order depending on how the object is created and arrives to this class. + */ + constructor({ size, params }) { + this.data = /* @__PURE__ */ new Map(); + this.parameters = []; + this.capacity = size ?? 50; + if (params) { + this.parameters = params; + } + } + /** + * @param endpointParams - query for endpoint. + * @param resolver - provider of the value if not present. + * @returns endpoint corresponding to the query. + */ + get(endpointParams, resolver) { + const key = this.hash(endpointParams); + if (key === false) { + return resolver(); + } + if (!this.data.has(key)) { + if (this.data.size > this.capacity + 10) { + const keys = this.data.keys(); + let i = 0; + while (true) { + const { value, done } = keys.next(); + this.data.delete(value); + if (done || ++i > 10) { + break; + } + } + } + this.data.set(key, resolver()); + } + return this.data.get(key); + } + size() { + return this.data.size; + } + /** + * @returns cache key or false if not cachable. + */ + hash(endpointParams) { + let buffer = ""; + const { parameters } = this; + if (parameters.length === 0) { + return false; + } + for (const param of parameters) { + const val = String(endpointParams[param] ?? 
""); + if (val.includes("|;")) { + return false; + } + buffer += val + "|;"; + } + return buffer; + } +}; +__name(_EndpointCache, "EndpointCache"); +var EndpointCache = _EndpointCache; + +// src/lib/isIpAddress.ts +var IP_V4_REGEX = new RegExp( + `^(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)(?:\\.(?:25[0-5]|2[0-4]\\d|1\\d\\d|[1-9]\\d|\\d)){3}$` +); +var isIpAddress = /* @__PURE__ */ __name((value) => IP_V4_REGEX.test(value) || value.startsWith("[") && value.endsWith("]"), "isIpAddress"); + +// src/lib/isValidHostLabel.ts +var VALID_HOST_LABEL_REGEX = new RegExp(`^(?!.*-$)(?!-)[a-zA-Z0-9-]{1,63}$`); +var isValidHostLabel = /* @__PURE__ */ __name((value, allowSubDomains = false) => { + if (!allowSubDomains) { + return VALID_HOST_LABEL_REGEX.test(value); + } + const labels = value.split("."); + for (const label of labels) { + if (!isValidHostLabel(label)) { + return false; + } + } + return true; +}, "isValidHostLabel"); + +// src/utils/customEndpointFunctions.ts +var customEndpointFunctions = {}; + +// src/debug/debugId.ts +var debugId = "endpoints"; + +// src/debug/toDebugString.ts +function toDebugString(input) { + if (typeof input !== "object" || input == null) { + return input; + } + if ("ref" in input) { + return `$${toDebugString(input.ref)}`; + } + if ("fn" in input) { + return `${input.fn}(${(input.argv || []).map(toDebugString).join(", ")})`; + } + return JSON.stringify(input, null, 2); +} +__name(toDebugString, "toDebugString"); + +// src/types/EndpointError.ts +var _EndpointError = class _EndpointError extends Error { + constructor(message) { + super(message); + this.name = "EndpointError"; + } +}; +__name(_EndpointError, "EndpointError"); +var EndpointError = _EndpointError; + +// src/lib/booleanEquals.ts +var booleanEquals = /* @__PURE__ */ __name((value1, value2) => value1 === value2, "booleanEquals"); + +// src/lib/getAttrPathList.ts +var getAttrPathList = /* @__PURE__ */ __name((path) => { + const parts = path.split("."); + const pathList = []; + for (const part of parts) { + const squareBracketIndex = part.indexOf("["); + if (squareBracketIndex !== -1) { + if (part.indexOf("]") !== part.length - 1) { + throw new EndpointError(`Path: '${path}' does not end with ']'`); + } + const arrayIndex = part.slice(squareBracketIndex + 1, -1); + if (Number.isNaN(parseInt(arrayIndex))) { + throw new EndpointError(`Invalid array index: '${arrayIndex}' in path: '${path}'`); + } + if (squareBracketIndex !== 0) { + pathList.push(part.slice(0, squareBracketIndex)); + } + pathList.push(arrayIndex); + } else { + pathList.push(part); + } + } + return pathList; +}, "getAttrPathList"); + +// src/lib/getAttr.ts +var getAttr = /* @__PURE__ */ __name((value, path) => getAttrPathList(path).reduce((acc, index) => { + if (typeof acc !== "object") { + throw new EndpointError(`Index '${index}' in '${path}' not found in '${JSON.stringify(value)}'`); + } else if (Array.isArray(acc)) { + return acc[parseInt(index)]; + } + return acc[index]; +}, value), "getAttr"); + +// src/lib/isSet.ts +var isSet = /* @__PURE__ */ __name((value) => value != null, "isSet"); + +// src/lib/not.ts +var not = /* @__PURE__ */ __name((value) => !value, "not"); + +// src/lib/parseURL.ts +var import_types3 = __nccwpck_require__(690); +var DEFAULT_PORTS = { + [import_types3.EndpointURLScheme.HTTP]: 80, + [import_types3.EndpointURLScheme.HTTPS]: 443 +}; +var parseURL = /* @__PURE__ */ __name((value) => { + const whatwgURL = (() => { + try { + if (value instanceof URL) { + return value; + } + if (typeof value === "object" && "hostname" in 
value) { + const { hostname: hostname2, port, protocol: protocol2 = "", path = "", query = {} } = value; + const url = new URL(`${protocol2}//${hostname2}${port ? `:${port}` : ""}${path}`); + url.search = Object.entries(query).map(([k, v]) => `${k}=${v}`).join("&"); + return url; + } + return new URL(value); + } catch (error) { + return null; + } + })(); + if (!whatwgURL) { + console.error(`Unable to parse ${JSON.stringify(value)} as a whatwg URL.`); + return null; + } + const urlString = whatwgURL.href; + const { host, hostname, pathname, protocol, search } = whatwgURL; + if (search) { + return null; + } + const scheme = protocol.slice(0, -1); + if (!Object.values(import_types3.EndpointURLScheme).includes(scheme)) { + return null; + } + const isIp = isIpAddress(hostname); + const inputContainsDefaultPort = urlString.includes(`${host}:${DEFAULT_PORTS[scheme]}`) || typeof value === "string" && value.includes(`${host}:${DEFAULT_PORTS[scheme]}`); + const authority = `${host}${inputContainsDefaultPort ? `:${DEFAULT_PORTS[scheme]}` : ``}`; + return { + scheme, + authority, + path: pathname, + normalizedPath: pathname.endsWith("/") ? pathname : `${pathname}/`, + isIp + }; +}, "parseURL"); + +// src/lib/stringEquals.ts +var stringEquals = /* @__PURE__ */ __name((value1, value2) => value1 === value2, "stringEquals"); + +// src/lib/substring.ts +var substring = /* @__PURE__ */ __name((input, start, stop, reverse) => { + if (start >= stop || input.length < stop) { + return null; + } + if (!reverse) { + return input.substring(start, stop); + } + return input.substring(input.length - stop, input.length - start); +}, "substring"); + +// src/lib/uriEncode.ts +var uriEncode = /* @__PURE__ */ __name((value) => encodeURIComponent(value).replace(/[!*'()]/g, (c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`), "uriEncode"); + +// src/utils/endpointFunctions.ts +var endpointFunctions = { + booleanEquals, + getAttr, + isSet, + isValidHostLabel, + not, + parseURL, + stringEquals, + substring, + uriEncode +}; + +// src/utils/evaluateTemplate.ts +var evaluateTemplate = /* @__PURE__ */ __name((template, options) => { + const evaluatedTemplateArr = []; + const templateContext = { + ...options.endpointParams, + ...options.referenceRecord + }; + let currentIndex = 0; + while (currentIndex < template.length) { + const openingBraceIndex = template.indexOf("{", currentIndex); + if (openingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(currentIndex)); + break; + } + evaluatedTemplateArr.push(template.slice(currentIndex, openingBraceIndex)); + const closingBraceIndex = template.indexOf("}", openingBraceIndex); + if (closingBraceIndex === -1) { + evaluatedTemplateArr.push(template.slice(openingBraceIndex)); + break; + } + if (template[openingBraceIndex + 1] === "{" && template[closingBraceIndex + 1] === "}") { + evaluatedTemplateArr.push(template.slice(openingBraceIndex + 1, closingBraceIndex)); + currentIndex = closingBraceIndex + 2; + } + const parameterName = template.substring(openingBraceIndex + 1, closingBraceIndex); + if (parameterName.includes("#")) { + const [refName, attrName] = parameterName.split("#"); + evaluatedTemplateArr.push(getAttr(templateContext[refName], attrName)); + } else { + evaluatedTemplateArr.push(templateContext[parameterName]); + } + currentIndex = closingBraceIndex + 1; + } + return evaluatedTemplateArr.join(""); +}, "evaluateTemplate"); + +// src/utils/getReferenceValue.ts +var getReferenceValue = /* @__PURE__ */ __name(({ ref }, options) => { + const referenceRecord = 
{ + ...options.endpointParams, + ...options.referenceRecord + }; + return referenceRecord[ref]; +}, "getReferenceValue"); + +// src/utils/evaluateExpression.ts +var evaluateExpression = /* @__PURE__ */ __name((obj, keyName, options) => { + if (typeof obj === "string") { + return evaluateTemplate(obj, options); + } else if (obj["fn"]) { + return callFunction(obj, options); + } else if (obj["ref"]) { + return getReferenceValue(obj, options); + } + throw new EndpointError(`'${keyName}': ${String(obj)} is not a string, function or reference.`); +}, "evaluateExpression"); + +// src/utils/callFunction.ts +var callFunction = /* @__PURE__ */ __name(({ fn, argv }, options) => { + const evaluatedArgs = argv.map( + (arg) => ["boolean", "number"].includes(typeof arg) ? arg : evaluateExpression(arg, "arg", options) + ); + const fnSegments = fn.split("."); + if (fnSegments[0] in customEndpointFunctions && fnSegments[1] != null) { + return customEndpointFunctions[fnSegments[0]][fnSegments[1]](...evaluatedArgs); + } + return endpointFunctions[fn](...evaluatedArgs); +}, "callFunction"); + +// src/utils/evaluateCondition.ts +var evaluateCondition = /* @__PURE__ */ __name(({ assign, ...fnArgs }, options) => { + var _a, _b; + if (assign && assign in options.referenceRecord) { + throw new EndpointError(`'${assign}' is already defined in Reference Record.`); + } + const value = callFunction(fnArgs, options); + (_b = (_a = options.logger) == null ? void 0 : _a.debug) == null ? void 0 : _b.call(_a, `${debugId} evaluateCondition: ${toDebugString(fnArgs)} = ${toDebugString(value)}`); + return { + result: value === "" ? true : !!value, + ...assign != null && { toAssign: { name: assign, value } } + }; +}, "evaluateCondition"); + +// src/utils/evaluateConditions.ts +var evaluateConditions = /* @__PURE__ */ __name((conditions = [], options) => { + var _a, _b; + const conditionsReferenceRecord = {}; + for (const condition of conditions) { + const { result, toAssign } = evaluateCondition(condition, { + ...options, + referenceRecord: { + ...options.referenceRecord, + ...conditionsReferenceRecord + } + }); + if (!result) { + return { result }; + } + if (toAssign) { + conditionsReferenceRecord[toAssign.name] = toAssign.value; + (_b = (_a = options.logger) == null ? void 0 : _a.debug) == null ? 
void 0 : _b.call(_a, `${debugId} assign: ${toAssign.name} := ${toDebugString(toAssign.value)}`); + } + } + return { result: true, referenceRecord: conditionsReferenceRecord }; +}, "evaluateConditions"); + +// src/utils/getEndpointHeaders.ts +var getEndpointHeaders = /* @__PURE__ */ __name((headers, options) => Object.entries(headers).reduce( + (acc, [headerKey, headerVal]) => ({ + ...acc, + [headerKey]: headerVal.map((headerValEntry) => { + const processedExpr = evaluateExpression(headerValEntry, "Header value entry", options); + if (typeof processedExpr !== "string") { + throw new EndpointError(`Header '${headerKey}' value '${processedExpr}' is not a string`); + } + return processedExpr; + }) + }), + {} +), "getEndpointHeaders"); + +// src/utils/getEndpointProperty.ts +var getEndpointProperty = /* @__PURE__ */ __name((property, options) => { + if (Array.isArray(property)) { + return property.map((propertyEntry) => getEndpointProperty(propertyEntry, options)); + } + switch (typeof property) { + case "string": + return evaluateTemplate(property, options); + case "object": + if (property === null) { + throw new EndpointError(`Unexpected endpoint property: ${property}`); + } + return getEndpointProperties(property, options); + case "boolean": + return property; + default: + throw new EndpointError(`Unexpected endpoint property type: ${typeof property}`); + } +}, "getEndpointProperty"); + +// src/utils/getEndpointProperties.ts +var getEndpointProperties = /* @__PURE__ */ __name((properties, options) => Object.entries(properties).reduce( + (acc, [propertyKey, propertyVal]) => ({ + ...acc, + [propertyKey]: getEndpointProperty(propertyVal, options) + }), + {} +), "getEndpointProperties"); + +// src/utils/getEndpointUrl.ts +var getEndpointUrl = /* @__PURE__ */ __name((endpointUrl, options) => { + const expression = evaluateExpression(endpointUrl, "Endpoint URL", options); + if (typeof expression === "string") { + try { + return new URL(expression); + } catch (error) { + console.error(`Failed to construct URL with ${expression}`, error); + throw error; + } + } + throw new EndpointError(`Endpoint URL must be a string, got ${typeof expression}`); +}, "getEndpointUrl"); + +// src/utils/evaluateEndpointRule.ts +var evaluateEndpointRule = /* @__PURE__ */ __name((endpointRule, options) => { + var _a, _b; + const { conditions, endpoint } = endpointRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + const endpointRuleOptions = { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }; + const { url, properties, headers } = endpoint; + (_b = (_a = options.logger) == null ? void 0 : _a.debug) == null ? 
void 0 : _b.call(_a, `${debugId} Resolving endpoint from template: ${toDebugString(endpoint)}`); + return { + ...headers != void 0 && { + headers: getEndpointHeaders(headers, endpointRuleOptions) + }, + ...properties != void 0 && { + properties: getEndpointProperties(properties, endpointRuleOptions) + }, + url: getEndpointUrl(url, endpointRuleOptions) + }; +}, "evaluateEndpointRule"); + +// src/utils/evaluateErrorRule.ts +var evaluateErrorRule = /* @__PURE__ */ __name((errorRule, options) => { + const { conditions, error } = errorRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + throw new EndpointError( + evaluateExpression(error, "Error", { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }) + ); +}, "evaluateErrorRule"); + +// src/utils/evaluateTreeRule.ts +var evaluateTreeRule = /* @__PURE__ */ __name((treeRule, options) => { + const { conditions, rules } = treeRule; + const { result, referenceRecord } = evaluateConditions(conditions, options); + if (!result) { + return; + } + return evaluateRules(rules, { + ...options, + referenceRecord: { ...options.referenceRecord, ...referenceRecord } + }); +}, "evaluateTreeRule"); + +// src/utils/evaluateRules.ts +var evaluateRules = /* @__PURE__ */ __name((rules, options) => { + for (const rule of rules) { + if (rule.type === "endpoint") { + const endpointOrUndefined = evaluateEndpointRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } else if (rule.type === "error") { + evaluateErrorRule(rule, options); + } else if (rule.type === "tree") { + const endpointOrUndefined = evaluateTreeRule(rule, options); + if (endpointOrUndefined) { + return endpointOrUndefined; + } + } else { + throw new EndpointError(`Unknown endpoint rule: ${rule}`); + } + } + throw new EndpointError(`Rules evaluation failed`); +}, "evaluateRules"); + +// src/resolveEndpoint.ts +var resolveEndpoint = /* @__PURE__ */ __name((ruleSetObject, options) => { + var _a, _b, _c, _d; + const { endpointParams, logger } = options; + const { parameters, rules } = ruleSetObject; + (_b = (_a = options.logger) == null ? void 0 : _a.debug) == null ? void 0 : _b.call(_a, `${debugId} Initial EndpointParams: ${toDebugString(endpointParams)}`); + const paramsWithDefault = Object.entries(parameters).filter(([, v]) => v.default != null).map(([k, v]) => [k, v.default]); + if (paramsWithDefault.length > 0) { + for (const [paramKey, paramDefaultValue] of paramsWithDefault) { + endpointParams[paramKey] = endpointParams[paramKey] ?? paramDefaultValue; + } + } + const requiredParams = Object.entries(parameters).filter(([, v]) => v.required).map(([k]) => k); + for (const requiredParam of requiredParams) { + if (endpointParams[requiredParam] == null) { + throw new EndpointError(`Missing required parameter: '${requiredParam}'`); + } + } + const endpoint = evaluateRules(rules, { endpointParams, logger, referenceRecord: {} }); + (_d = (_c = options.logger) == null ? void 0 : _c.debug) == null ? 
void 0 : _d.call(_c, `${debugId} Resolved endpoint: ${toDebugString(endpoint)}`); + return endpoint; +}, "resolveEndpoint"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 6435: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromHex: () => fromHex, + toHex: () => toHex +}); +module.exports = __toCommonJS(src_exports); +var SHORT_TO_HEX = {}; +var HEX_TO_SHORT = {}; +for (let i = 0; i < 256; i++) { + let encodedByte = i.toString(16).toLowerCase(); + if (encodedByte.length === 1) { + encodedByte = `0${encodedByte}`; + } + SHORT_TO_HEX[i] = encodedByte; + HEX_TO_SHORT[encodedByte] = i; +} +function fromHex(encoded) { + if (encoded.length % 2 !== 0) { + throw new Error("Hex encoded strings must have an even number length"); + } + const out = new Uint8Array(encoded.length / 2); + for (let i = 0; i < encoded.length; i += 2) { + const encodedByte = encoded.slice(i, i + 2).toLowerCase(); + if (encodedByte in HEX_TO_SHORT) { + out[i / 2] = HEX_TO_SHORT[encodedByte]; + } else { + throw new Error(`Cannot decode unrecognized sequence ${encodedByte} as hexadecimal`); + } + } + return out; +} +__name(fromHex, "fromHex"); +function toHex(bytes) { + let out = ""; + for (let i = 0; i < bytes.byteLength; i++) { + out += SHORT_TO_HEX[bytes[i]]; + } + return out; +} +__name(toHex, "toHex"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 6324: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + getSmithyContext: () => getSmithyContext, + normalizeProvider: () => normalizeProvider +}); +module.exports = __toCommonJS(src_exports); + +// src/getSmithyContext.ts +var 
import_types = __nccwpck_require__(690); +var getSmithyContext = /* @__PURE__ */ __name((context) => context[import_types.SMITHY_CONTEXT_KEY] || (context[import_types.SMITHY_CONTEXT_KEY] = {}), "getSmithyContext"); + +// src/normalizeProvider.ts +var normalizeProvider = /* @__PURE__ */ __name((input) => { + if (typeof input === "function") + return input; + const promisified = Promise.resolve(input); + return () => promisified; +}, "normalizeProvider"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 5518: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + AdaptiveRetryStrategy: () => AdaptiveRetryStrategy, + ConfiguredRetryStrategy: () => ConfiguredRetryStrategy, + DEFAULT_MAX_ATTEMPTS: () => DEFAULT_MAX_ATTEMPTS, + DEFAULT_RETRY_DELAY_BASE: () => DEFAULT_RETRY_DELAY_BASE, + DEFAULT_RETRY_MODE: () => DEFAULT_RETRY_MODE, + DefaultRateLimiter: () => DefaultRateLimiter, + INITIAL_RETRY_TOKENS: () => INITIAL_RETRY_TOKENS, + INVOCATION_ID_HEADER: () => INVOCATION_ID_HEADER, + MAXIMUM_RETRY_DELAY: () => MAXIMUM_RETRY_DELAY, + NO_RETRY_INCREMENT: () => NO_RETRY_INCREMENT, + REQUEST_HEADER: () => REQUEST_HEADER, + RETRY_COST: () => RETRY_COST, + RETRY_MODES: () => RETRY_MODES, + StandardRetryStrategy: () => StandardRetryStrategy, + THROTTLING_RETRY_DELAY_BASE: () => THROTTLING_RETRY_DELAY_BASE, + TIMEOUT_RETRY_COST: () => TIMEOUT_RETRY_COST +}); +module.exports = __toCommonJS(src_exports); + +// src/config.ts +var RETRY_MODES = /* @__PURE__ */ ((RETRY_MODES2) => { + RETRY_MODES2["STANDARD"] = "standard"; + RETRY_MODES2["ADAPTIVE"] = "adaptive"; + return RETRY_MODES2; +})(RETRY_MODES || {}); +var DEFAULT_MAX_ATTEMPTS = 3; +var DEFAULT_RETRY_MODE = "standard" /* STANDARD */; + +// src/DefaultRateLimiter.ts +var import_service_error_classification = __nccwpck_require__(2058); +var _DefaultRateLimiter = class _DefaultRateLimiter { + constructor(options) { + // Pre-set state variables + this.currentCapacity = 0; + this.enabled = false; + this.lastMaxRate = 0; + this.measuredTxRate = 0; + this.requestCount = 0; + this.lastTimestamp = 0; + this.timeWindow = 0; + this.beta = (options == null ? void 0 : options.beta) ?? 0.7; + this.minCapacity = (options == null ? void 0 : options.minCapacity) ?? 1; + this.minFillRate = (options == null ? void 0 : options.minFillRate) ?? 0.5; + this.scaleConstant = (options == null ? void 0 : options.scaleConstant) ?? 0.4; + this.smooth = (options == null ? void 0 : options.smooth) ?? 
0.8; + const currentTimeInSeconds = this.getCurrentTimeInSeconds(); + this.lastThrottleTime = currentTimeInSeconds; + this.lastTxRateBucket = Math.floor(this.getCurrentTimeInSeconds()); + this.fillRate = this.minFillRate; + this.maxCapacity = this.minCapacity; + } + getCurrentTimeInSeconds() { + return Date.now() / 1e3; + } + async getSendToken() { + return this.acquireTokenBucket(1); + } + async acquireTokenBucket(amount) { + if (!this.enabled) { + return; + } + this.refillTokenBucket(); + if (amount > this.currentCapacity) { + const delay = (amount - this.currentCapacity) / this.fillRate * 1e3; + await new Promise((resolve) => _DefaultRateLimiter.setTimeoutFn(resolve, delay)); + } + this.currentCapacity = this.currentCapacity - amount; + } + refillTokenBucket() { + const timestamp = this.getCurrentTimeInSeconds(); + if (!this.lastTimestamp) { + this.lastTimestamp = timestamp; + return; + } + const fillAmount = (timestamp - this.lastTimestamp) * this.fillRate; + this.currentCapacity = Math.min(this.maxCapacity, this.currentCapacity + fillAmount); + this.lastTimestamp = timestamp; + } + updateClientSendingRate(response) { + let calculatedRate; + this.updateMeasuredRate(); + if ((0, import_service_error_classification.isThrottlingError)(response)) { + const rateToUse = !this.enabled ? this.measuredTxRate : Math.min(this.measuredTxRate, this.fillRate); + this.lastMaxRate = rateToUse; + this.calculateTimeWindow(); + this.lastThrottleTime = this.getCurrentTimeInSeconds(); + calculatedRate = this.cubicThrottle(rateToUse); + this.enableTokenBucket(); + } else { + this.calculateTimeWindow(); + calculatedRate = this.cubicSuccess(this.getCurrentTimeInSeconds()); + } + const newRate = Math.min(calculatedRate, 2 * this.measuredTxRate); + this.updateTokenBucketRate(newRate); + } + calculateTimeWindow() { + this.timeWindow = this.getPrecise(Math.pow(this.lastMaxRate * (1 - this.beta) / this.scaleConstant, 1 / 3)); + } + cubicThrottle(rateToUse) { + return this.getPrecise(rateToUse * this.beta); + } + cubicSuccess(timestamp) { + return this.getPrecise( + this.scaleConstant * Math.pow(timestamp - this.lastThrottleTime - this.timeWindow, 3) + this.lastMaxRate + ); + } + enableTokenBucket() { + this.enabled = true; + } + updateTokenBucketRate(newRate) { + this.refillTokenBucket(); + this.fillRate = Math.max(newRate, this.minFillRate); + this.maxCapacity = Math.max(newRate, this.minCapacity); + this.currentCapacity = Math.min(this.currentCapacity, this.maxCapacity); + } + updateMeasuredRate() { + const t = this.getCurrentTimeInSeconds(); + const timeBucket = Math.floor(t * 2) / 2; + this.requestCount++; + if (timeBucket > this.lastTxRateBucket) { + const currentRate = this.requestCount / (timeBucket - this.lastTxRateBucket); + this.measuredTxRate = this.getPrecise(currentRate * this.smooth + this.measuredTxRate * (1 - this.smooth)); + this.requestCount = 0; + this.lastTxRateBucket = timeBucket; + } + } + getPrecise(num) { + return parseFloat(num.toFixed(8)); + } +}; +__name(_DefaultRateLimiter, "DefaultRateLimiter"); +/** + * Only used in testing. 
+ */ +_DefaultRateLimiter.setTimeoutFn = setTimeout; +var DefaultRateLimiter = _DefaultRateLimiter; + +// src/constants.ts +var DEFAULT_RETRY_DELAY_BASE = 100; +var MAXIMUM_RETRY_DELAY = 20 * 1e3; +var THROTTLING_RETRY_DELAY_BASE = 500; +var INITIAL_RETRY_TOKENS = 500; +var RETRY_COST = 5; +var TIMEOUT_RETRY_COST = 10; +var NO_RETRY_INCREMENT = 1; +var INVOCATION_ID_HEADER = "amz-sdk-invocation-id"; +var REQUEST_HEADER = "amz-sdk-request"; + +// src/defaultRetryBackoffStrategy.ts +var getDefaultRetryBackoffStrategy = /* @__PURE__ */ __name(() => { + let delayBase = DEFAULT_RETRY_DELAY_BASE; + const computeNextBackoffDelay = /* @__PURE__ */ __name((attempts) => { + return Math.floor(Math.min(MAXIMUM_RETRY_DELAY, Math.random() * 2 ** attempts * delayBase)); + }, "computeNextBackoffDelay"); + const setDelayBase = /* @__PURE__ */ __name((delay) => { + delayBase = delay; + }, "setDelayBase"); + return { + computeNextBackoffDelay, + setDelayBase + }; +}, "getDefaultRetryBackoffStrategy"); + +// src/defaultRetryToken.ts +var createDefaultRetryToken = /* @__PURE__ */ __name(({ + retryDelay, + retryCount, + retryCost +}) => { + const getRetryCount = /* @__PURE__ */ __name(() => retryCount, "getRetryCount"); + const getRetryDelay = /* @__PURE__ */ __name(() => Math.min(MAXIMUM_RETRY_DELAY, retryDelay), "getRetryDelay"); + const getRetryCost = /* @__PURE__ */ __name(() => retryCost, "getRetryCost"); + return { + getRetryCount, + getRetryDelay, + getRetryCost + }; +}, "createDefaultRetryToken"); + +// src/StandardRetryStrategy.ts +var _StandardRetryStrategy = class _StandardRetryStrategy { + constructor(maxAttempts) { + this.maxAttempts = maxAttempts; + this.mode = "standard" /* STANDARD */; + this.capacity = INITIAL_RETRY_TOKENS; + this.retryBackoffStrategy = getDefaultRetryBackoffStrategy(); + this.maxAttemptsProvider = typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts; + } + // eslint-disable-next-line @typescript-eslint/no-unused-vars + async acquireInitialRetryToken(retryTokenScope) { + return createDefaultRetryToken({ + retryDelay: DEFAULT_RETRY_DELAY_BASE, + retryCount: 0 + }); + } + async refreshRetryTokenForRetry(token, errorInfo) { + const maxAttempts = await this.getMaxAttempts(); + if (this.shouldRetry(token, errorInfo, maxAttempts)) { + const errorType = errorInfo.errorType; + this.retryBackoffStrategy.setDelayBase( + errorType === "THROTTLING" ? THROTTLING_RETRY_DELAY_BASE : DEFAULT_RETRY_DELAY_BASE + ); + const delayFromErrorType = this.retryBackoffStrategy.computeNextBackoffDelay(token.getRetryCount()); + const retryDelay = errorInfo.retryAfterHint ? Math.max(errorInfo.retryAfterHint.getTime() - Date.now() || 0, delayFromErrorType) : delayFromErrorType; + const capacityCost = this.getCapacityCost(errorType); + this.capacity -= capacityCost; + return createDefaultRetryToken({ + retryDelay, + retryCount: token.getRetryCount() + 1, + retryCost: capacityCost + }); + } + throw new Error("No retry token available"); + } + recordSuccess(token) { + this.capacity = Math.max(INITIAL_RETRY_TOKENS, this.capacity + (token.getRetryCost() ?? NO_RETRY_INCREMENT)); + } + /** + * @returns the current available retry capacity. + * + * This number decreases when retries are executed and refills when requests or retries succeed. + */ + getCapacity() { + return this.capacity; + } + async getMaxAttempts() { + try { + return await this.maxAttemptsProvider(); + } catch (error) { + console.warn(`Max attempts provider could not resolve. 
Using default of ${DEFAULT_MAX_ATTEMPTS}`); + return DEFAULT_MAX_ATTEMPTS; + } + } + shouldRetry(tokenToRenew, errorInfo, maxAttempts) { + const attempts = tokenToRenew.getRetryCount() + 1; + return attempts < maxAttempts && this.capacity >= this.getCapacityCost(errorInfo.errorType) && this.isRetryableError(errorInfo.errorType); + } + getCapacityCost(errorType) { + return errorType === "TRANSIENT" ? TIMEOUT_RETRY_COST : RETRY_COST; + } + isRetryableError(errorType) { + return errorType === "THROTTLING" || errorType === "TRANSIENT"; + } +}; +__name(_StandardRetryStrategy, "StandardRetryStrategy"); +var StandardRetryStrategy = _StandardRetryStrategy; + +// src/AdaptiveRetryStrategy.ts +var _AdaptiveRetryStrategy = class _AdaptiveRetryStrategy { + constructor(maxAttemptsProvider, options) { + this.maxAttemptsProvider = maxAttemptsProvider; + this.mode = "adaptive" /* ADAPTIVE */; + const { rateLimiter } = options ?? {}; + this.rateLimiter = rateLimiter ?? new DefaultRateLimiter(); + this.standardRetryStrategy = new StandardRetryStrategy(maxAttemptsProvider); + } + async acquireInitialRetryToken(retryTokenScope) { + await this.rateLimiter.getSendToken(); + return this.standardRetryStrategy.acquireInitialRetryToken(retryTokenScope); + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + this.rateLimiter.updateClientSendingRate(errorInfo); + return this.standardRetryStrategy.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + } + recordSuccess(token) { + this.rateLimiter.updateClientSendingRate({}); + this.standardRetryStrategy.recordSuccess(token); + } +}; +__name(_AdaptiveRetryStrategy, "AdaptiveRetryStrategy"); +var AdaptiveRetryStrategy = _AdaptiveRetryStrategy; + +// src/ConfiguredRetryStrategy.ts +var _ConfiguredRetryStrategy = class _ConfiguredRetryStrategy extends StandardRetryStrategy { + /** + * @param maxAttempts - the maximum number of retry attempts allowed. + * e.g., if set to 3, then 4 total requests are possible. + * @param computeNextBackoffDelay - a millisecond delay for each retry or a function that takes the retry attempt + * and returns the delay. + * + * @example exponential backoff. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, (attempt) => attempt ** 2) + * }); + * ``` + * @example constant delay. + * ```js + * new Client({ + * retryStrategy: new ConfiguredRetryStrategy(3, 2000) + * }); + * ``` + */ + constructor(maxAttempts, computeNextBackoffDelay = DEFAULT_RETRY_DELAY_BASE) { + super(typeof maxAttempts === "function" ? maxAttempts : async () => maxAttempts); + if (typeof computeNextBackoffDelay === "number") { + this.computeNextBackoffDelay = () => computeNextBackoffDelay; + } else { + this.computeNextBackoffDelay = computeNextBackoffDelay; + } + } + async refreshRetryTokenForRetry(tokenToRenew, errorInfo) { + const token = await super.refreshRetryTokenForRetry(tokenToRenew, errorInfo); + token.getRetryDelay = () => this.computeNextBackoffDelay(token.getRetryCount()); + return token; + } +}; +__name(_ConfiguredRetryStrategy, "ConfiguredRetryStrategy"); +var ConfiguredRetryStrategy = _ConfiguredRetryStrategy; +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 7753: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ChecksumStream = void 0; +const ReadableStreamRef = typeof ReadableStream === "function" ? 
ReadableStream : function () { }; +class ChecksumStream extends ReadableStreamRef { +} +exports.ChecksumStream = ChecksumStream; + + +/***/ }), + +/***/ 1775: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ChecksumStream = void 0; +const util_base64_1 = __nccwpck_require__(8385); +const stream_1 = __nccwpck_require__(2203); +class ChecksumStream extends stream_1.Duplex { + constructor({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) { + var _a, _b; + super(); + if (typeof source.pipe === "function") { + this.source = source; + } + else { + throw new Error(`@smithy/util-stream: unsupported source type ${(_b = (_a = source === null || source === void 0 ? void 0 : source.constructor) === null || _a === void 0 ? void 0 : _a.name) !== null && _b !== void 0 ? _b : source} in ChecksumStream.`); + } + this.base64Encoder = base64Encoder !== null && base64Encoder !== void 0 ? base64Encoder : util_base64_1.toBase64; + this.expectedChecksum = expectedChecksum; + this.checksum = checksum; + this.checksumSourceLocation = checksumSourceLocation; + this.source.pipe(this); + } + _read(size) { } + _write(chunk, encoding, callback) { + try { + this.checksum.update(chunk); + this.push(chunk); + } + catch (e) { + return callback(e); + } + return callback(); + } + async _final(callback) { + try { + const digest = await this.checksum.digest(); + const received = this.base64Encoder(digest); + if (this.expectedChecksum !== received) { + return callback(new Error(`Checksum mismatch: expected "${this.expectedChecksum}" but received "${received}"` + + ` in response header "${this.checksumSourceLocation}".`)); + } + } + catch (e) { + return callback(e); + } + this.push(null); + return callback(); + } +} +exports.ChecksumStream = ChecksumStream; + + +/***/ }), + +/***/ 4129: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createChecksumStream = void 0; +const util_base64_1 = __nccwpck_require__(8385); +const stream_type_check_1 = __nccwpck_require__(4414); +const ChecksumStream_browser_1 = __nccwpck_require__(7753); +const createChecksumStream = ({ expectedChecksum, checksum, source, checksumSourceLocation, base64Encoder, }) => { + var _a, _b; + if (!(0, stream_type_check_1.isReadableStream)(source)) { + throw new Error(`@smithy/util-stream: unsupported source type ${(_b = (_a = source === null || source === void 0 ? void 0 : source.constructor) === null || _a === void 0 ? void 0 : _a.name) !== null && _b !== void 0 ? _b : source} in ChecksumStream.`); + } + const encoder = base64Encoder !== null && base64Encoder !== void 0 ? 
base64Encoder : util_base64_1.toBase64; + if (typeof TransformStream !== "function") { + throw new Error("@smithy/util-stream: unable to instantiate ChecksumStream because API unavailable: ReadableStream/TransformStream."); + } + const transform = new TransformStream({ + start() { }, + async transform(chunk, controller) { + checksum.update(chunk); + controller.enqueue(chunk); + }, + async flush(controller) { + const digest = await checksum.digest(); + const received = encoder(digest); + if (expectedChecksum !== received) { + const error = new Error(`Checksum mismatch: expected "${expectedChecksum}" but received "${received}"` + + ` in response header "${checksumSourceLocation}".`); + controller.error(error); + } + else { + controller.terminate(); + } + }, + }); + source.pipeThrough(transform); + const readable = transform.readable; + Object.setPrototypeOf(readable, ChecksumStream_browser_1.ChecksumStream.prototype); + return readable; +}; +exports.createChecksumStream = createChecksumStream; + + +/***/ }), + +/***/ 5639: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createChecksumStream = void 0; +const stream_type_check_1 = __nccwpck_require__(4414); +const ChecksumStream_1 = __nccwpck_require__(1775); +const createChecksumStream_browser_1 = __nccwpck_require__(4129); +function createChecksumStream(init) { + if (typeof ReadableStream === "function" && (0, stream_type_check_1.isReadableStream)(init.source)) { + return (0, createChecksumStream_browser_1.createChecksumStream)(init); + } + return new ChecksumStream_1.ChecksumStream(init); +} +exports.createChecksumStream = createChecksumStream; + + +/***/ }), + +/***/ 6522: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getAwsChunkedEncodingStream = void 0; +const stream_1 = __nccwpck_require__(2203); +const getAwsChunkedEncodingStream = (readableStream, options) => { + const { base64Encoder, bodyLengthChecker, checksumAlgorithmFn, checksumLocationName, streamHasher } = options; + const checksumRequired = base64Encoder !== undefined && + checksumAlgorithmFn !== undefined && + checksumLocationName !== undefined && + streamHasher !== undefined; + const digest = checksumRequired ? 
streamHasher(checksumAlgorithmFn, readableStream) : undefined; + const awsChunkedEncodingStream = new stream_1.Readable({ read: () => { } }); + readableStream.on("data", (data) => { + const length = bodyLengthChecker(data) || 0; + awsChunkedEncodingStream.push(`${length.toString(16)}\r\n`); + awsChunkedEncodingStream.push(data); + awsChunkedEncodingStream.push("\r\n"); + }); + readableStream.on("end", async () => { + awsChunkedEncodingStream.push(`0\r\n`); + if (checksumRequired) { + const checksum = base64Encoder(await digest); + awsChunkedEncodingStream.push(`${checksumLocationName}:${checksum}\r\n`); + awsChunkedEncodingStream.push(`\r\n`); + } + awsChunkedEncodingStream.push(null); + }); + return awsChunkedEncodingStream; +}; +exports.getAwsChunkedEncodingStream = getAwsChunkedEncodingStream; + + +/***/ }), + +/***/ 66: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.headStream = void 0; +async function headStream(stream, bytes) { + var _a; + let byteLengthCounter = 0; + const chunks = []; + const reader = stream.getReader(); + let isDone = false; + while (!isDone) { + const { done, value } = await reader.read(); + if (value) { + chunks.push(value); + byteLengthCounter += (_a = value === null || value === void 0 ? void 0 : value.byteLength) !== null && _a !== void 0 ? _a : 0; + } + if (byteLengthCounter >= bytes) { + break; + } + isDone = done; + } + reader.releaseLock(); + const collected = new Uint8Array(Math.min(bytes, byteLengthCounter)); + let offset = 0; + for (const chunk of chunks) { + if (chunk.byteLength > collected.byteLength - offset) { + collected.set(chunk.subarray(0, collected.byteLength - offset), offset); + break; + } + else { + collected.set(chunk, offset); + } + offset += chunk.length; + } + return collected; +} +exports.headStream = headStream; + + +/***/ }), + +/***/ 8412: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.headStream = void 0; +const stream_1 = __nccwpck_require__(2203); +const headStream_browser_1 = __nccwpck_require__(66); +const stream_type_check_1 = __nccwpck_require__(4414); +const headStream = (stream, bytes) => { + if ((0, stream_type_check_1.isReadableStream)(stream)) { + return (0, headStream_browser_1.headStream)(stream, bytes); + } + return new Promise((resolve, reject) => { + const collector = new Collector(); + collector.limit = bytes; + stream.pipe(collector); + stream.on("error", (err) => { + collector.end(); + reject(err); + }); + collector.on("error", reject); + collector.on("finish", function () { + const bytes = new Uint8Array(Buffer.concat(this.buffers)); + resolve(bytes); + }); + }); +}; +exports.headStream = headStream; +class Collector extends stream_1.Writable { + constructor() { + super(...arguments); + this.buffers = []; + this.limit = Infinity; + this.bytesBuffered = 0; + } + _write(chunk, encoding, callback) { + var _a; + this.buffers.push(chunk); + this.bytesBuffered += (_a = chunk.byteLength) !== null && _a !== void 0 ? 
_a : 0; + if (this.bytesBuffered >= this.limit) { + const excess = this.bytesBuffered - this.limit; + const tailBuffer = this.buffers[this.buffers.length - 1]; + this.buffers[this.buffers.length - 1] = tailBuffer.subarray(0, tailBuffer.byteLength - excess); + this.emit("finish"); + } + callback(); + } +} + + +/***/ }), + +/***/ 4252: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + Uint8ArrayBlobAdapter: () => Uint8ArrayBlobAdapter +}); +module.exports = __toCommonJS(src_exports); + +// src/blob/transforms.ts +var import_util_base64 = __nccwpck_require__(8385); +var import_util_utf8 = __nccwpck_require__(1577); +function transformToString(payload, encoding = "utf-8") { + if (encoding === "base64") { + return (0, import_util_base64.toBase64)(payload); + } + return (0, import_util_utf8.toUtf8)(payload); +} +__name(transformToString, "transformToString"); +function transformFromString(str, encoding) { + if (encoding === "base64") { + return Uint8ArrayBlobAdapter.mutate((0, import_util_base64.fromBase64)(str)); + } + return Uint8ArrayBlobAdapter.mutate((0, import_util_utf8.fromUtf8)(str)); +} +__name(transformFromString, "transformFromString"); + +// src/blob/Uint8ArrayBlobAdapter.ts +var _Uint8ArrayBlobAdapter = class _Uint8ArrayBlobAdapter extends Uint8Array { + /** + * @param source - such as a string or Stream. + * @returns a new Uint8ArrayBlobAdapter extending Uint8Array. + */ + static fromString(source, encoding = "utf-8") { + switch (typeof source) { + case "string": + return transformFromString(source, encoding); + default: + throw new Error(`Unsupported conversion from ${typeof source} to Uint8ArrayBlobAdapter.`); + } + } + /** + * @param source - Uint8Array to be mutated. + * @returns the same Uint8Array but with prototype switched to Uint8ArrayBlobAdapter. + */ + static mutate(source) { + Object.setPrototypeOf(source, _Uint8ArrayBlobAdapter.prototype); + return source; + } + /** + * @param encoding - default 'utf-8'. + * @returns the blob as string. 
+ */ + transformToString(encoding = "utf-8") { + return transformToString(this, encoding); + } +}; +__name(_Uint8ArrayBlobAdapter, "Uint8ArrayBlobAdapter"); +var Uint8ArrayBlobAdapter = _Uint8ArrayBlobAdapter; + +// src/index.ts +__reExport(src_exports, __nccwpck_require__(6522), module.exports); +__reExport(src_exports, __nccwpck_require__(7201), module.exports); +__reExport(src_exports, __nccwpck_require__(2108), module.exports); +__reExport(src_exports, __nccwpck_require__(8412), module.exports); +__reExport(src_exports, __nccwpck_require__(4414), module.exports); +__reExport(src_exports, __nccwpck_require__(5639), module.exports); +__reExport(src_exports, __nccwpck_require__(1775), module.exports); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 2207: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.sdkStreamMixin = void 0; +const fetch_http_handler_1 = __nccwpck_require__(7809); +const util_base64_1 = __nccwpck_require__(8385); +const util_hex_encoding_1 = __nccwpck_require__(6435); +const util_utf8_1 = __nccwpck_require__(1577); +const stream_type_check_1 = __nccwpck_require__(4414); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!isBlobInstance(stream) && !(0, stream_type_check_1.isReadableStream)(stream)) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Blob or ReadableStream, got ${name}`); + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, fetch_http_handler_1.streamCollector)(stream); + }; + const blobToWebStream = (blob) => { + if (typeof blob.stream !== "function") { + throw new Error("Cannot transform payload Blob to web stream. 
Please make sure the Blob.stream() is polyfilled.\n" + + "If you are using React Native, this API is not yet supported, see: https://react-native.canny.io/feature-requests/p/fetch-streaming-body"); + } + return blob.stream(); + }; + return Object.assign(stream, { + transformToByteArray: transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === "base64") { + return (0, util_base64_1.toBase64)(buf); + } + else if (encoding === "hex") { + return (0, util_hex_encoding_1.toHex)(buf); + } + else if (encoding === undefined || encoding === "utf8" || encoding === "utf-8") { + return (0, util_utf8_1.toUtf8)(buf); + } + else if (typeof TextDecoder === "function") { + return new TextDecoder(encoding).decode(buf); + } + else { + throw new Error("TextDecoder is not available, please make sure polyfill is provided."); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + if (isBlobInstance(stream)) { + return blobToWebStream(stream); + } + else if ((0, stream_type_check_1.isReadableStream)(stream)) { + return stream; + } + else { + throw new Error(`Cannot transform payload to web stream, got ${stream}`); + } + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; +const isBlobInstance = (stream) => typeof Blob === "function" && stream instanceof Blob; + + +/***/ }), + +/***/ 7201: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.sdkStreamMixin = void 0; +const node_http_handler_1 = __nccwpck_require__(1279); +const util_buffer_from_1 = __nccwpck_require__(4151); +const stream_1 = __nccwpck_require__(2203); +const sdk_stream_mixin_browser_1 = __nccwpck_require__(2207); +const ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED = "The stream has already been transformed."; +const sdkStreamMixin = (stream) => { + var _a, _b; + if (!(stream instanceof stream_1.Readable)) { + try { + return (0, sdk_stream_mixin_browser_1.sdkStreamMixin)(stream); + } + catch (e) { + const name = ((_b = (_a = stream === null || stream === void 0 ? void 0 : stream.__proto__) === null || _a === void 0 ? void 0 : _a.constructor) === null || _b === void 0 ? void 0 : _b.name) || stream; + throw new Error(`Unexpected stream implementation, expect Stream.Readable instance, got ${name}`); + } + } + let transformed = false; + const transformToByteArray = async () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + transformed = true; + return await (0, node_http_handler_1.streamCollector)(stream); + }; + return Object.assign(stream, { + transformToByteArray, + transformToString: async (encoding) => { + const buf = await transformToByteArray(); + if (encoding === undefined || Buffer.isEncoding(encoding)) { + return (0, util_buffer_from_1.fromArrayBuffer)(buf.buffer, buf.byteOffset, buf.byteLength).toString(encoding); + } + else { + const decoder = new TextDecoder(encoding); + return decoder.decode(buf); + } + }, + transformToWebStream: () => { + if (transformed) { + throw new Error(ERR_MSG_STREAM_HAS_BEEN_TRANSFORMED); + } + if (stream.readableFlowing !== null) { + throw new Error("The stream has been consumed by other callbacks."); + } + if (typeof stream_1.Readable.toWeb !== "function") { + throw new Error("Readable.toWeb() is not supported. 
Please make sure you are using Node.js >= 17.0.0, or polyfill is available."); + } + transformed = true; + return stream_1.Readable.toWeb(stream); + }, + }); +}; +exports.sdkStreamMixin = sdkStreamMixin; + + +/***/ }), + +/***/ 7570: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.splitStream = void 0; +async function splitStream(stream) { + if (typeof stream.stream === "function") { + stream = stream.stream(); + } + const readableStream = stream; + return readableStream.tee(); +} +exports.splitStream = splitStream; + + +/***/ }), + +/***/ 2108: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.splitStream = void 0; +const stream_1 = __nccwpck_require__(2203); +const splitStream_browser_1 = __nccwpck_require__(7570); +const stream_type_check_1 = __nccwpck_require__(4414); +async function splitStream(stream) { + if ((0, stream_type_check_1.isReadableStream)(stream) || (0, stream_type_check_1.isBlob)(stream)) { + return (0, splitStream_browser_1.splitStream)(stream); + } + const stream1 = new stream_1.PassThrough(); + const stream2 = new stream_1.PassThrough(); + stream.pipe(stream1); + stream.pipe(stream2); + return [stream1, stream2]; +} +exports.splitStream = splitStream; + + +/***/ }), + +/***/ 4414: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isBlob = exports.isReadableStream = void 0; +const isReadableStream = (stream) => { + var _a; + return typeof ReadableStream === "function" && + (((_a = stream === null || stream === void 0 ? void 0 : stream.constructor) === null || _a === void 0 ? void 0 : _a.name) === ReadableStream.name || stream instanceof ReadableStream); +}; +exports.isReadableStream = isReadableStream; +const isBlob = (blob) => { + var _a; + return typeof Blob === "function" && (((_a = blob === null || blob === void 0 ? void 0 : blob.constructor) === null || _a === void 0 ? 
void 0 : _a.name) === Blob.name || blob instanceof Blob); +}; +exports.isBlob = isBlob; + + +/***/ }), + +/***/ 146: +/***/ ((module) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + escapeUri: () => escapeUri, + escapeUriPath: () => escapeUriPath +}); +module.exports = __toCommonJS(src_exports); + +// src/escape-uri.ts +var escapeUri = /* @__PURE__ */ __name((uri) => ( + // AWS percent-encodes some extra non-standard characters in a URI + encodeURIComponent(uri).replace(/[!'()*]/g, hexEncode) +), "escapeUri"); +var hexEncode = /* @__PURE__ */ __name((c) => `%${c.charCodeAt(0).toString(16).toUpperCase()}`, "hexEncode"); + +// src/escape-uri-path.ts +var escapeUriPath = /* @__PURE__ */ __name((uri) => uri.split("/").map(escapeUri).join("/"), "escapeUriPath"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 1577: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// src/index.ts +var src_exports = {}; +__export(src_exports, { + fromUtf8: () => fromUtf8, + toUint8Array: () => toUint8Array, + toUtf8: () => toUtf8 +}); +module.exports = __toCommonJS(src_exports); + +// src/fromUtf8.ts +var import_util_buffer_from = __nccwpck_require__(4151); +var fromUtf8 = /* @__PURE__ */ __name((input) => { + const buf = (0, import_util_buffer_from.fromString)(input, "utf8"); + return new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength / Uint8Array.BYTES_PER_ELEMENT); +}, "fromUtf8"); + +// src/toUint8Array.ts +var toUint8Array = /* @__PURE__ */ __name((data) => { + if (typeof data === "string") { + return fromUtf8(data); + } + if (ArrayBuffer.isView(data)) { + return new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + return new 
Uint8Array(data); +}, "toUint8Array"); + +// src/toUtf8.ts + +var toUtf8 = /* @__PURE__ */ __name((input) => { + if (typeof input === "string") { + return input; + } + if (typeof input !== "object" || typeof input.byteOffset !== "number" || typeof input.byteLength !== "number") { + throw new Error("@smithy/util-utf8: toUtf8 encoder function only accepts string | Uint8Array."); + } + return (0, import_util_buffer_from.fromArrayBuffer)(input.buffer, input.byteOffset, input.byteLength).toString("utf8"); +}, "toUtf8"); +// Annotate the CommonJS export names for ESM import in node: + +0 && (0); + + + +/***/ }), + +/***/ 9380: +/***/ ((module) => { + +"use strict"; + +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} + + +/***/ }), + +/***/ 2732: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var register = __nccwpck_require__(1063); +var addHook = __nccwpck_require__(2027); +var removeHook = __nccwpck_require__(9934); + +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind; +var bindable = bind.bind(bind); + +function bindApi(hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply( + null, + name ? [state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach(function (kind) { + var args = name ? [state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); +} + +function HookSingular() { + var singularHookName = "h"; + var singularHookState = { + registry: {}, + }; + var singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; +} + +function HookCollection() { + var state = { + registry: {}, + }; + + var hook = register.bind(null, state); + bindApi(hook, state); + + return hook; +} + +var collectionHookDeprecationMessageDisplayed = false; +function Hook() { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn( + '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4' + ); + collectionHookDeprecationMessageDisplayed = true; + } + return HookCollection(); +} + +Hook.Singular = HookSingular.bind(); +Hook.Collection = HookCollection.bind(); + +module.exports = Hook; +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook; +module.exports.Singular = Hook.Singular; +module.exports.Collection = Hook.Collection; + + +/***/ }), + +/***/ 2027: +/***/ ((module) => { + +module.exports = addHook; + +function addHook(state, kind, name, hook) { + var orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; + } + + if (kind === "before") { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)); + }; + } + + if (kind === "after") { + hook = function (method, options) { + var result; + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_; + return orig(result, options); + }) + .then(function () { + return result; + }); + }; + } + + if (kind === "error") { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options); + }); + }; + } + + state.registry[name].push({ + hook: hook, + orig: orig, + }); +} + + +/***/ }), + +/***/ 1063: +/***/ ((module) => { + +module.exports = register; + +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); + } + + if (!options) { + options = {}; + } + + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options); + }, method)(); + } + + return Promise.resolve().then(function () { + if (!state.registry[name]) { + return method(options); + } + + return state.registry[name].reduce(function (method, registered) { + return registered.hook.bind(null, method, options); + }, method)(); + }); +} + + +/***/ }), + +/***/ 9934: +/***/ ((module) => { + +module.exports = removeHook; + +function removeHook(state, name, method) { + if (!state.registry[name]) { + return; + } + + var index = state.registry[name] + .map(function (registered) { + return registered.orig; + }) + .indexOf(method); + + if (index === -1) { + return; + } + + state.registry[name].splice(index, 1); +} + + +/***/ }), + +/***/ 4691: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var balanced = __nccwpck_require__(9380); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m) return [str]; + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + if (/\$$/.test(m.pre)) { + for (var k = 0; k < post.length; k++) { + var expansion = pre+ '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } else { + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = []; + + for (var j = 0; j < n.length; j++) { + N.push.apply(N, expand(n[j], false)); + } + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + } + + return expansions; +} + + + +/***/ }), + +/***/ 8926: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/* + +The MIT License (MIT) + +Original Library + - Copyright (c) Marak Squires + +Additional functionality + - Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +*/ + +var colors = {}; +module['exports'] = colors; + +colors.themes = {}; + +var util = __nccwpck_require__(9023); +var ansiStyles = colors.styles = __nccwpck_require__(3680); +var defineProps = Object.defineProperties; +var newLineRegex = new RegExp(/[\r\n]+/g); + +colors.supportsColor = (__nccwpck_require__(2663).supportsColor); + +if (typeof colors.enabled === 'undefined') { + colors.enabled = colors.supportsColor() !== false; +} + +colors.enable = function() { + colors.enabled = true; +}; + +colors.disable = function() { + colors.enabled = false; +}; + +colors.stripColors = colors.strip = function(str) { + return ('' + str).replace(/\x1B\[\d+m/g, ''); +}; + +// eslint-disable-next-line no-unused-vars +var stylize = colors.stylize = function stylize(str, style) { + if (!colors.enabled) { + return str+''; + } + + var styleMap = ansiStyles[style]; + + // Stylize should work for non-ANSI styles, too + if(!styleMap && style in colors){ + // Style maps like trap operate as functions on strings; + // they don't have properties like open or close. 
+ return colors[style](str); + } + + return styleMap.open + str + styleMap.close; +}; + +var matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g; +var escapeStringRegexp = function(str) { + if (typeof str !== 'string') { + throw new TypeError('Expected a string'); + } + return str.replace(matchOperatorsRe, '\\$&'); +}; + +function build(_styles) { + var builder = function builder() { + return applyStyle.apply(builder, arguments); + }; + builder._styles = _styles; + // __proto__ is used because we must return a function, but there is + // no way to create a function with a different prototype. + builder.__proto__ = proto; + return builder; +} + +var styles = (function() { + var ret = {}; + ansiStyles.grey = ansiStyles.gray; + Object.keys(ansiStyles).forEach(function(key) { + ansiStyles[key].closeRe = + new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g'); + ret[key] = { + get: function() { + return build(this._styles.concat(key)); + }, + }; + }); + return ret; +})(); + +var proto = defineProps(function colors() {}, styles); + +function applyStyle() { + var args = Array.prototype.slice.call(arguments); + + var str = args.map(function(arg) { + // Use weak equality check so we can colorize null/undefined in safe mode + if (arg != null && arg.constructor === String) { + return arg; + } else { + return util.inspect(arg); + } + }).join(' '); + + if (!colors.enabled || !str) { + return str; + } + + var newLinesPresent = str.indexOf('\n') != -1; + + var nestedStyles = this._styles; + + var i = nestedStyles.length; + while (i--) { + var code = ansiStyles[nestedStyles[i]]; + str = code.open + str.replace(code.closeRe, code.open) + code.close; + if (newLinesPresent) { + str = str.replace(newLineRegex, function(match) { + return code.close + match + code.open; + }); + } + } + + return str; +} + +colors.setTheme = function(theme) { + if (typeof theme === 'string') { + console.log('colors.setTheme now only accepts an object, not a string. ' + + 'If you are trying to set a theme from a file, it is now your (the ' + + 'caller\'s) responsibility to require the file. 
The old syntax ' + + 'looked like colors.setTheme(__dirname + ' + + '\'/../themes/generic-logging.js\'); The new syntax looks like '+ + 'colors.setTheme(require(__dirname + ' + + '\'/../themes/generic-logging.js\'));'); + return; + } + for (var style in theme) { + (function(style) { + colors[style] = function(str) { + if (typeof theme[style] === 'object') { + var out = str; + for (var i in theme[style]) { + out = colors[theme[style][i]](out); + } + return out; + } + return colors[theme[style]](str); + }; + })(style); + } +}; + +function init() { + var ret = {}; + Object.keys(styles).forEach(function(name) { + ret[name] = { + get: function() { + return build([name]); + }, + }; + }); + return ret; +} + +var sequencer = function sequencer(map, str) { + var exploded = str.split(''); + exploded = exploded.map(map); + return exploded.join(''); +}; + +// custom formatter methods +colors.trap = __nccwpck_require__(2255); +colors.zalgo = __nccwpck_require__(2799); + +// maps +colors.maps = {}; +colors.maps.america = __nccwpck_require__(602)(colors); +colors.maps.zebra = __nccwpck_require__(4762)(colors); +colors.maps.rainbow = __nccwpck_require__(7792)(colors); +colors.maps.random = __nccwpck_require__(6407)(colors); + +for (var map in colors.maps) { + (function(map) { + colors[map] = function(str) { + return sequencer(colors.maps[map], str); + }; + })(map); +} + +defineProps(colors, init()); + + +/***/ }), + +/***/ 2255: +/***/ ((module) => { + +module['exports'] = function runTheTrap(text, options) { + var result = ''; + text = text || 'Run the trap, drop the bass'; + text = text.split(''); + var trap = { + a: ['\u0040', '\u0104', '\u023a', '\u0245', '\u0394', '\u039b', '\u0414'], + b: ['\u00df', '\u0181', '\u0243', '\u026e', '\u03b2', '\u0e3f'], + c: ['\u00a9', '\u023b', '\u03fe'], + d: ['\u00d0', '\u018a', '\u0500', '\u0501', '\u0502', '\u0503'], + e: ['\u00cb', '\u0115', '\u018e', '\u0258', '\u03a3', '\u03be', '\u04bc', + '\u0a6c'], + f: ['\u04fa'], + g: ['\u0262'], + h: ['\u0126', '\u0195', '\u04a2', '\u04ba', '\u04c7', '\u050a'], + i: ['\u0f0f'], + j: ['\u0134'], + k: ['\u0138', '\u04a0', '\u04c3', '\u051e'], + l: ['\u0139'], + m: ['\u028d', '\u04cd', '\u04ce', '\u0520', '\u0521', '\u0d69'], + n: ['\u00d1', '\u014b', '\u019d', '\u0376', '\u03a0', '\u048a'], + o: ['\u00d8', '\u00f5', '\u00f8', '\u01fe', '\u0298', '\u047a', '\u05dd', + '\u06dd', '\u0e4f'], + p: ['\u01f7', '\u048e'], + q: ['\u09cd'], + r: ['\u00ae', '\u01a6', '\u0210', '\u024c', '\u0280', '\u042f'], + s: ['\u00a7', '\u03de', '\u03df', '\u03e8'], + t: ['\u0141', '\u0166', '\u0373'], + u: ['\u01b1', '\u054d'], + v: ['\u05d8'], + w: ['\u0428', '\u0460', '\u047c', '\u0d70'], + x: ['\u04b2', '\u04fe', '\u04fc', '\u04fd'], + y: ['\u00a5', '\u04b0', '\u04cb'], + z: ['\u01b5', '\u0240'], + }; + text.forEach(function(c) { + c = c.toLowerCase(); + var chars = trap[c] || [' ']; + var rand = Math.floor(Math.random() * chars.length); + if (typeof trap[c] !== 'undefined') { + result += trap[c][rand]; + } else { + result += c; + } + }); + return result; +}; + + +/***/ }), + +/***/ 2799: +/***/ ((module) => { + +// please no +module['exports'] = function zalgo(text, options) { + text = text || ' he is here '; + var soul = { + 'up': [ + '̍', '̎', '̄', '̅', + '̿', '̑', '̆', '̐', + '͒', '͗', '͑', '̇', + '̈', '̊', '͂', '̓', + '̈', '͊', '͋', '͌', + '̃', '̂', '̌', '͐', + '̀', '́', '̋', '̏', + '̒', '̓', '̔', '̽', + '̉', 'ͣ', 'ͤ', 'ͥ', + 'ͦ', 'ͧ', 'ͨ', 'ͩ', + 'ͪ', 'ͫ', 'ͬ', 'ͭ', + 'ͮ', 'ͯ', '̾', '͛', + '͆', '̚', + ], + 'down': [ + '̖', '̗', '̘', '̙', 
+ '̜', '̝', '̞', '̟', + '̠', '̤', '̥', '̦', + '̩', '̪', '̫', '̬', + '̭', '̮', '̯', '̰', + '̱', '̲', '̳', '̹', + '̺', '̻', '̼', 'ͅ', + '͇', '͈', '͉', '͍', + '͎', '͓', '͔', '͕', + '͖', '͙', '͚', '̣', + ], + 'mid': [ + '̕', '̛', '̀', '́', + '͘', '̡', '̢', '̧', + '̨', '̴', '̵', '̶', + '͜', '͝', '͞', + '͟', '͠', '͢', '̸', + '̷', '͡', ' ҉', + ], + }; + var all = [].concat(soul.up, soul.down, soul.mid); + + function randomNumber(range) { + var r = Math.floor(Math.random() * range); + return r; + } + + function isChar(character) { + var bool = false; + all.filter(function(i) { + bool = (i === character); + }); + return bool; + } + + + function heComes(text, options) { + var result = ''; + var counts; + var l; + options = options || {}; + options['up'] = + typeof options['up'] !== 'undefined' ? options['up'] : true; + options['mid'] = + typeof options['mid'] !== 'undefined' ? options['mid'] : true; + options['down'] = + typeof options['down'] !== 'undefined' ? options['down'] : true; + options['size'] = + typeof options['size'] !== 'undefined' ? options['size'] : 'maxi'; + text = text.split(''); + for (l in text) { + if (isChar(l)) { + continue; + } + result = result + text[l]; + counts = {'up': 0, 'down': 0, 'mid': 0}; + switch (options.size) { + case 'mini': + counts.up = randomNumber(8); + counts.mid = randomNumber(2); + counts.down = randomNumber(8); + break; + case 'maxi': + counts.up = randomNumber(16) + 3; + counts.mid = randomNumber(4) + 1; + counts.down = randomNumber(64) + 3; + break; + default: + counts.up = randomNumber(8) + 1; + counts.mid = randomNumber(6) / 2; + counts.down = randomNumber(8) + 1; + break; + } + + var arr = ['up', 'mid', 'down']; + for (var d in arr) { + var index = arr[d]; + for (var i = 0; i <= counts[index]; i++) { + if (options[index]) { + result = result + soul[index][randomNumber(soul[index].length)]; + } + } + } + } + return result; + } + // don't summon him + return heComes(text, options); +}; + + + +/***/ }), + +/***/ 602: +/***/ ((module) => { + +module['exports'] = function(colors) { + return function(letter, i, exploded) { + if (letter === ' ') return letter; + switch (i%3) { + case 0: return colors.red(letter); + case 1: return colors.white(letter); + case 2: return colors.blue(letter); + } + }; +}; + + +/***/ }), + +/***/ 7792: +/***/ ((module) => { + +module['exports'] = function(colors) { + // RoY G BiV + var rainbowColors = ['red', 'yellow', 'green', 'blue', 'magenta']; + return function(letter, i, exploded) { + if (letter === ' ') { + return letter; + } else { + return colors[rainbowColors[i++ % rainbowColors.length]](letter); + } + }; +}; + + + +/***/ }), + +/***/ 6407: +/***/ ((module) => { + +module['exports'] = function(colors) { + var available = ['underline', 'inverse', 'grey', 'yellow', 'red', 'green', + 'blue', 'white', 'cyan', 'magenta', 'brightYellow', 'brightRed', + 'brightGreen', 'brightBlue', 'brightWhite', 'brightCyan', 'brightMagenta']; + return function(letter, i, exploded) { + return letter === ' ' ? letter : + colors[ + available[Math.round(Math.random() * (available.length - 2))] + ](letter); + }; +}; + + +/***/ }), + +/***/ 4762: +/***/ ((module) => { + +module['exports'] = function(colors) { + return function(letter, i, exploded) { + return i % 2 === 0 ? 
letter : colors.inverse(letter); + }; +}; + + +/***/ }), + +/***/ 3680: +/***/ ((module) => { + +/* +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +*/ + +var styles = {}; +module['exports'] = styles; + +var codes = { + reset: [0, 0], + + bold: [1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29], + + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + gray: [90, 39], + grey: [90, 39], + + brightRed: [91, 39], + brightGreen: [92, 39], + brightYellow: [93, 39], + brightBlue: [94, 39], + brightMagenta: [95, 39], + brightCyan: [96, 39], + brightWhite: [97, 39], + + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + bgGray: [100, 49], + bgGrey: [100, 49], + + bgBrightRed: [101, 49], + bgBrightGreen: [102, 49], + bgBrightYellow: [103, 49], + bgBrightBlue: [104, 49], + bgBrightMagenta: [105, 49], + bgBrightCyan: [106, 49], + bgBrightWhite: [107, 49], + + // legacy styles for colors pre v1.0.0 + blackBG: [40, 49], + redBG: [41, 49], + greenBG: [42, 49], + yellowBG: [43, 49], + blueBG: [44, 49], + magentaBG: [45, 49], + cyanBG: [46, 49], + whiteBG: [47, 49], + +}; + +Object.keys(codes).forEach(function(key) { + var val = codes[key]; + var style = styles[key] = []; + style.open = '\u001b[' + val[0] + 'm'; + style.close = '\u001b[' + val[1] + 'm'; +}); + + +/***/ }), + +/***/ 7755: +/***/ ((module) => { + +"use strict"; +/* +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +*/ + + + +module.exports = function(flag, argv) { + argv = argv || process.argv; + + var terminatorPos = argv.indexOf('--'); + var prefix = /^-{1,2}/.test(flag) ? '' : '--'; + var pos = argv.indexOf(prefix + flag); + + return pos !== -1 && (terminatorPos === -1 ? true : pos < terminatorPos); +}; + + +/***/ }), + +/***/ 2663: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/* +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +*/ + + + +var os = __nccwpck_require__(857); +var hasFlag = __nccwpck_require__(7755); + +var env = process.env; + +var forceColor = void 0; +if (hasFlag('no-color') || hasFlag('no-colors') || hasFlag('color=false')) { + forceColor = false; +} else if (hasFlag('color') || hasFlag('colors') || hasFlag('color=true') + || hasFlag('color=always')) { + forceColor = true; +} +if ('FORCE_COLOR' in env) { + forceColor = env.FORCE_COLOR.length === 0 + || parseInt(env.FORCE_COLOR, 10) !== 0; +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level: level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3, + }; +} + +function supportsColor(stream) { + if (forceColor === false) { + return 0; + } + + if (hasFlag('color=16m') || hasFlag('color=full') + || hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (stream && !stream.isTTY && forceColor !== true) { + return 0; + } + + var min = forceColor ? 1 : 0; + + if (process.platform === 'win32') { + // Node.js 7.5.0 is the first version of Node.js to include a patch to + // libuv that enables 256 color output on Windows. Anything earlier and it + // won't work. However, here we target Node.js 8 at minimum as it is an LTS + // release, and Node.js 7 is not. Windows 10 build 10586 is the first + // Windows release that supports 256 colors. Windows 10 build 14931 is the + // first release that supports 16m/TrueColor. + var osRelease = os.release().split('.'); + if (Number(process.versions.node.split('.')[0]) >= 8 + && Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { + return Number(osRelease[2]) >= 14931 ? 
3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI'].some(function(sign) { + return sign in env; + }) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return (/^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0 + ); + } + + if ('TERM_PROGRAM' in env) { + var version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 3 : 2; + case 'Hyper': + return 3; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + if (env.TERM === 'dumb') { + return min; + } + + return min; +} + +function getSupportLevel(stream) { + var level = supportsColor(stream); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: getSupportLevel(process.stdout), + stderr: getSupportLevel(process.stderr), +}; + + +/***/ }), + +/***/ 4095: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// +// Remark: Requiring this file will use the "safe" colors API, +// which will not touch String.prototype. +// +// var colors = require('colors/safe'); +// colors.red("foo") +// +// +var colors = __nccwpck_require__(8926); +module['exports'] = colors; + + +/***/ }), + +/***/ 4150: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; + + +/***/ }), + +/***/ 9741: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const validator = __nccwpck_require__(9433); +const XMLParser = __nccwpck_require__(9844); +const XMLBuilder = __nccwpck_require__(659); + +module.exports = { + XMLParser: XMLParser, + XMLValidator: validator, + XMLBuilder: XMLBuilder +} + +/***/ }), + +/***/ 7019: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; +const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; +const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' +const regexName = new RegExp('^' + nameRegexp + '$'); + +const getAllMatches = function(string, regex) { + const matches = []; + let match = regex.exec(string); + while (match) { + const allmatches = []; + allmatches.startIndex = regex.lastIndex - match[0].length; + const len = match.length; + for (let index = 0; index < len; index++) { + allmatches.push(match[index]); + } + matches.push(allmatches); + match = regex.exec(string); + } + return matches; +}; + +const isName = function(string) { + const match = regexName.exec(string); + return !(match === null || typeof match === 'undefined'); +}; + +exports.isExist = function(v) { + return typeof v !== 'undefined'; +}; + +exports.isEmptyObject = function(obj) { + return 
Object.keys(obj).length === 0; +}; + +/** + * Copy all the properties of a into b. + * @param {*} target + * @param {*} a + */ +exports.merge = function(target, a, arrayMode) { + if (a) { + const keys = Object.keys(a); // will return an array of own properties + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + if (arrayMode === 'strict') { + target[keys[i]] = [ a[keys[i]] ]; + } else { + target[keys[i]] = a[keys[i]]; + } + } + } +}; +/* exports.merge =function (b,a){ + return Object.assign(b,a); +} */ + +exports.getValue = function(v) { + if (exports.isExist(v)) { + return v; + } else { + return ''; + } +}; + +// const fakeCall = function(a) {return a;}; +// const fakeCallNoReturn = function() {}; + +exports.isName = isName; +exports.getAllMatches = getAllMatches; +exports.nameRegexp = nameRegexp; + + +/***/ }), + +/***/ 9433: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(7019); + +const defaultOptions = { + allowBooleanAttributes: false, //A tag can have attributes without any value + unpairedTags: [] +}; + +//const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); +exports.validate = function (xmlData, options) { + options = Object.assign({}, defaultOptions, options); + + //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line + //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag + //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE + const tags = []; + let tagFound = false; + + //indicates that the root tag has been closed (aka. depth 0 has been reached) + let reachedRoot = false; + + if (xmlData[0] === '\ufeff') { + // check for byte order mark (BOM) + xmlData = xmlData.substr(1); + } + + for (let i = 0; i < xmlData.length; i++) { + + if (xmlData[i] === '<' && xmlData[i+1] === '?') { + i+=2; + i = readPI(xmlData,i); + if (i.err) return i; + }else if (xmlData[i] === '<') { + //starting of tag + //read until you reach to '>' avoiding any '>' in attribute value + let tagStartPos = i; + i++; + + if (xmlData[i] === '!') { + i = readCommentAndCDATA(xmlData, i); + continue; + } else { + let closingTag = false; + if (xmlData[i] === '/') { + //closing tag + closingTag = true; + i++; + } + //read tagname + let tagName = ''; + for (; i < xmlData.length && + xmlData[i] !== '>' && + xmlData[i] !== ' ' && + xmlData[i] !== '\t' && + xmlData[i] !== '\n' && + xmlData[i] !== '\r'; i++ + ) { + tagName += xmlData[i]; + } + tagName = tagName.trim(); + //console.log(tagName); + + if (tagName[tagName.length - 1] === '/') { + //self closing tag without attributes + tagName = tagName.substring(0, tagName.length - 1); + //continue; + i--; + } + if (!validateTagName(tagName)) { + let msg; + if (tagName.trim().length === 0) { + msg = "Invalid space after '<'."; + } else { + msg = "Tag '"+tagName+"' is an invalid name."; + } + return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); + } + + const result = readAttributeStr(xmlData, i); + if (result === false) { + return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); + } + let attrStr = result.value; + i = result.index; + + if (attrStr[attrStr.length - 1] === '/') { + //self closing tag + const attrStrStart = i - attrStr.length; + attrStr = attrStr.substring(0, attrStr.length - 1); + const isValid = validateAttributeString(attrStr, options); + if (isValid === true) { + tagFound = true; + //continue; //text 
may presents after self closing tag + } else { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); + } + } else if (closingTag) { + if (!result.tagClosed) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); + } else if (attrStr.trim().length > 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); + } else if (tags.length === 0) { + return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); + } else { + const otg = tags.pop(); + if (tagName !== otg.tagName) { + let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); + return getErrorObject('InvalidTag', + "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", + getLineNumberForPosition(xmlData, tagStartPos)); + } + + //when there are no more tags, we reached the root level. + if (tags.length == 0) { + reachedRoot = true; + } + } + } else { + const isValid = validateAttributeString(attrStr, options); + if (isValid !== true) { + //the result from the nested function returns the position of the error within the attribute + //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute + //this gives us the absolute index in the entire xml, which we can use to find the line at last + return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); + } + + //if the root level has been reached before ... 
+ if (reachedRoot === true) { + return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); + } else if(options.unpairedTags.indexOf(tagName) !== -1){ + //don't push into stack + } else { + tags.push({tagName, tagStartPos}); + } + tagFound = true; + } + + //skip tag text value + //It may include comments and CDATA value + for (i++; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + if (xmlData[i + 1] === '!') { + //comment or CADATA + i++; + i = readCommentAndCDATA(xmlData, i); + continue; + } else if (xmlData[i+1] === '?') { + i = readPI(xmlData, ++i); + if (i.err) return i; + } else{ + break; + } + } else if (xmlData[i] === '&') { + const afterAmp = validateAmpersand(xmlData, i); + if (afterAmp == -1) + return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); + i = afterAmp; + }else{ + if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { + return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); + } + } + } //end of reading tag text value + if (xmlData[i] === '<') { + i--; + } + } + } else { + if ( isWhiteSpace(xmlData[i])) { + continue; + } + return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); + } + } + + if (!tagFound) { + return getErrorObject('InvalidXml', 'Start tag expected.', 1); + }else if (tags.length == 1) { + return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); + }else if (tags.length > 0) { + return getErrorObject('InvalidXml', "Invalid '"+ + JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ + "' found.", {line: 1, col: 1}); + } + + return true; +}; + +function isWhiteSpace(char){ + return char === ' ' || char === '\t' || char === '\n' || char === '\r'; +} +/** + * Read Processing insstructions and skip + * @param {*} xmlData + * @param {*} i + */ +function readPI(xmlData, i) { + const start = i; + for (; i < xmlData.length; i++) { + if (xmlData[i] == '?' || xmlData[i] == ' ') { + //tagname + const tagname = xmlData.substr(start, i - start); + if (i > 5 && tagname === 'xml') { + return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); + } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { + //check if valid attribut string + i++; + break; + } else { + continue; + } + } + } + return i; +} + +function readCommentAndCDATA(xmlData, i) { + if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { + //comment + for (i += 3; i < xmlData.length; i++) { + if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } else if ( + xmlData.length > i + 8 && + xmlData[i + 1] === 'D' && + xmlData[i + 2] === 'O' && + xmlData[i + 3] === 'C' && + xmlData[i + 4] === 'T' && + xmlData[i + 5] === 'Y' && + xmlData[i + 6] === 'P' && + xmlData[i + 7] === 'E' + ) { + let angleBracketsCount = 1; + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === '<') { + angleBracketsCount++; + } else if (xmlData[i] === '>') { + angleBracketsCount--; + if (angleBracketsCount === 0) { + break; + } + } + } + } else if ( + xmlData.length > i + 9 && + xmlData[i + 1] === '[' && + xmlData[i + 2] === 'C' && + xmlData[i + 3] === 'D' && + xmlData[i + 4] === 'A' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'A' && + xmlData[i + 7] === '[' + ) { + for (i += 8; i < xmlData.length; i++) { + if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { + i += 2; + break; + } + } + } + + return i; +} + +const doubleQuote = '"'; +const singleQuote = "'"; + +/** + * Keep reading xmlData until '<' is found outside the attribute value. + * @param {string} xmlData + * @param {number} i + */ +function readAttributeStr(xmlData, i) { + let attrStr = ''; + let startChar = ''; + let tagClosed = false; + for (; i < xmlData.length; i++) { + if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { + if (startChar === '') { + startChar = xmlData[i]; + } else if (startChar !== xmlData[i]) { + //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa + } else { + startChar = ''; + } + } else if (xmlData[i] === '>') { + if (startChar === '') { + tagClosed = true; + break; + } + } + attrStr += xmlData[i]; + } + if (startChar !== '') { + return false; + } + + return { + value: attrStr, + index: i, + tagClosed: tagClosed + }; +} + +/** + * Select all the attributes whether valid or invalid. 
+ */ +const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); + +//attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" + +function validateAttributeString(attrStr, options) { + //console.log("start:"+attrStr+":end"); + + //if(attrStr.trim().length === 0) return true; //empty string + + const matches = util.getAllMatches(attrStr, validAttrStrRegxp); + const attrNames = {}; + + for (let i = 0; i < matches.length; i++) { + if (matches[i][1].length === 0) { + //nospace before attribute name: a="sd"b="saf" + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) + } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { + return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); + } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { + //independent attribute: ab + return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); + } + /* else if(matches[i][6] === undefined){//attribute without value: ab= + return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; + } */ + const attrName = matches[i][2]; + if (!validateAttrName(attrName)) { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); + } + if (!attrNames.hasOwnProperty(attrName)) { + //check for duplicate attribute. + attrNames[attrName] = 1; + } else { + return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); + } + } + + return true; +} + +function validateNumberAmpersand(xmlData, i) { + let re = /\d/; + if (xmlData[i] === 'x') { + i++; + re = /[\da-fA-F]/; + } + for (; i < xmlData.length; i++) { + if (xmlData[i] === ';') + return i; + if (!xmlData[i].match(re)) + break; + } + return -1; +} + +function validateAmpersand(xmlData, i) { + // https://www.w3.org/TR/xml/#dt-charref + i++; + if (xmlData[i] === ';') + return -1; + if (xmlData[i] === '#') { + i++; + return validateNumberAmpersand(xmlData, i); + } + let count = 0; + for (; i < xmlData.length; i++, count++) { + if (xmlData[i].match(/\w/) && count < 20) + continue; + if (xmlData[i] === ';') + break; + return -1; + } + return i; +} + +function getErrorObject(code, message, lineNumber) { + return { + err: { + code: code, + msg: message, + line: lineNumber.line || lineNumber, + col: lineNumber.col, + }, + }; +} + +function validateAttrName(attrName) { + return util.isName(attrName); +} + +// const startsWithXML = /^xml/i; + +function validateTagName(tagname) { + return util.isName(tagname) /* && !tagname.match(startsWithXML) */; +} + +//this function returns the line number for the character at the given index +function getLineNumberForPosition(xmlData, index) { + const lines = xmlData.substring(0, index).split(/\r?\n/); + return { + line: lines.length, + + // column number is last line's length + 1, because column numbering starts at 1: + col: lines[lines.length - 1].length + 1 + }; +} + +//this function returns the position of the first character of match within attrStr +function getPositionFromMatch(match) { + return match.startIndex + match[1].length; +} + + +/***/ }), + +/***/ 659: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +//parse Empty Node as self closing node +const buildFromOrderedJs 
= __nccwpck_require__(3997); + +const defaultOptions = { + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + cdataPropName: false, + format: false, + indentBy: ' ', + suppressEmptyNode: false, + suppressUnpairedNode: true, + suppressBooleanAttributes: true, + tagValueProcessor: function(key, a) { + return a; + }, + attributeValueProcessor: function(attrName, a) { + return a; + }, + preserveOrder: false, + commentPropName: false, + unpairedTags: [], + entities: [ + { regex: new RegExp("&", "g"), val: "&" },//it must be on top + { regex: new RegExp(">", "g"), val: ">" }, + { regex: new RegExp("<", "g"), val: "<" }, + { regex: new RegExp("\'", "g"), val: "'" }, + { regex: new RegExp("\"", "g"), val: """ } + ], + processEntities: true, + stopNodes: [], + // transformTagName: false, + // transformAttributeName: false, + oneListGroup: false +}; + +function Builder(options) { + this.options = Object.assign({}, defaultOptions, options); + if (this.options.ignoreAttributes || this.options.attributesGroupName) { + this.isAttribute = function(/*a*/) { + return false; + }; + } else { + this.attrPrefixLen = this.options.attributeNamePrefix.length; + this.isAttribute = isAttribute; + } + + this.processTextOrObjNode = processTextOrObjNode + + if (this.options.format) { + this.indentate = indentate; + this.tagEndChar = '>\n'; + this.newLine = '\n'; + } else { + this.indentate = function() { + return ''; + }; + this.tagEndChar = '>'; + this.newLine = ''; + } +} + +Builder.prototype.build = function(jObj) { + if(this.options.preserveOrder){ + return buildFromOrderedJs(jObj, this.options); + }else { + if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){ + jObj = { + [this.options.arrayNodeName] : jObj + } + } + return this.j2x(jObj, 0).val; + } +}; + +Builder.prototype.j2x = function(jObj, level) { + let attrStr = ''; + let val = ''; + for (let key in jObj) { + if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue; + if (typeof jObj[key] === 'undefined') { + // supress undefined node only if it is not an attribute + if (this.isAttribute(key)) { + val += ''; + } + } else if (jObj[key] === null) { + // null attribute should be ignored by the attribute list, but should not cause the tag closing + if (this.isAttribute(key)) { + val += ''; + } else if (key[0] === '?') { + val += this.indentate(level) + '<' + key + '?' + this.tagEndChar; + } else { + val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (jObj[key] instanceof Date) { + val += this.buildTextValNode(jObj[key], key, '', level); + } else if (typeof jObj[key] !== 'object') { + //premitive type + const attr = this.isAttribute(key); + if (attr) { + attrStr += this.buildAttrPairStr(attr, '' + jObj[key]); + }else { + //tag value + if (key === this.options.textNodeName) { + let newval = this.options.tagValueProcessor(key, '' + jObj[key]); + val += this.replaceEntitiesValue(newval); + } else { + val += this.buildTextValNode(jObj[key], key, '', level); + } + } + } else if (Array.isArray(jObj[key])) { + //repeated nodes + const arrLen = jObj[key].length; + let listTagVal = ""; + let listTagAttr = ""; + for (let j = 0; j < arrLen; j++) { + const item = jObj[key][j]; + if (typeof item === 'undefined') { + // supress undefined node + } else if (item === null) { + if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' 
+ this.tagEndChar; + else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar; + } else if (typeof item === 'object') { + if(this.options.oneListGroup){ + const result = this.j2x(item, level + 1); + listTagVal += result.val; + if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) { + listTagAttr += result.attrStr + } + }else{ + listTagVal += this.processTextOrObjNode(item, key, level) + } + } else { + if (this.options.oneListGroup) { + let textValue = this.options.tagValueProcessor(key, item); + textValue = this.replaceEntitiesValue(textValue); + listTagVal += textValue; + } else { + listTagVal += this.buildTextValNode(item, key, '', level); + } + } + } + if(this.options.oneListGroup){ + listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level); + } + val += listTagVal; + } else { + //nested node + if (this.options.attributesGroupName && key === this.options.attributesGroupName) { + const Ks = Object.keys(jObj[key]); + const L = Ks.length; + for (let j = 0; j < L; j++) { + attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]); + } + } else { + val += this.processTextOrObjNode(jObj[key], key, level) + } + } + } + return {attrStr: attrStr, val: val}; +}; + +Builder.prototype.buildAttrPairStr = function(attrName, val){ + val = this.options.attributeValueProcessor(attrName, '' + val); + val = this.replaceEntitiesValue(val); + if (this.options.suppressBooleanAttributes && val === "true") { + return ' ' + attrName; + } else return ' ' + attrName + '="' + val + '"'; +} + +function processTextOrObjNode (object, key, level) { + const result = this.j2x(object, level + 1); + if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) { + return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level); + } else { + return this.buildObjectNode(result.val, key, result.attrStr, level); + } +} + +Builder.prototype.buildObjectNode = function(val, key, attrStr, level) { + if(val === ""){ + if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar; + else { + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + } + }else{ + + let tagEndExp = '' + val + tagEndExp ); + } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) { + return this.indentate(level) + `` + this.newLine; + }else { + return ( + this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar + + val + + this.indentate(level) + tagEndExp ); + } + } +} + +Builder.prototype.closeTag = function(key){ + let closeTag = ""; + if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired + if(!this.options.suppressUnpairedNode) closeTag = "/" + }else if(this.options.suppressEmptyNode){ //empty + closeTag = "/"; + }else{ + closeTag = `>` + this.newLine; + }else if (this.options.commentPropName !== false && key === this.options.commentPropName) { + return this.indentate(level) + `` + this.newLine; + }else if(key[0] === "?") {//PI tag + return this.indentate(level) + '<' + key + attrStr+ '?' 
+ this.tagEndChar; + }else{ + let textValue = this.options.tagValueProcessor(key, val); + textValue = this.replaceEntitiesValue(textValue); + + if( textValue === ''){ + return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar; + }else{ + return this.indentate(level) + '<' + key + attrStr + '>' + + textValue + + ' 0 && this.options.processEntities){ + for (let i=0; i { + +const EOL = "\n"; + +/** + * + * @param {array} jArray + * @param {any} options + * @returns + */ +function toXml(jArray, options) { + let indentation = ""; + if (options.format && options.indentBy.length > 0) { + indentation = EOL; + } + return arrToStr(jArray, options, "", indentation); +} + +function arrToStr(arr, options, jPath, indentation) { + let xmlStr = ""; + let isPreviousElementTag = false; + + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const tagName = propName(tagObj); + if(tagName === undefined) continue; + + let newJPath = ""; + if (jPath.length === 0) newJPath = tagName + else newJPath = `${jPath}.${tagName}`; + + if (tagName === options.textNodeName) { + let tagText = tagObj[tagName]; + if (!isStopNode(newJPath, options)) { + tagText = options.tagValueProcessor(tagName, tagText); + tagText = replaceEntitiesValue(tagText, options); + } + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += tagText; + isPreviousElementTag = false; + continue; + } else if (tagName === options.cdataPropName) { + if (isPreviousElementTag) { + xmlStr += indentation; + } + xmlStr += ``; + isPreviousElementTag = false; + continue; + } else if (tagName === options.commentPropName) { + xmlStr += indentation + ``; + isPreviousElementTag = true; + continue; + } else if (tagName[0] === "?") { + const attStr = attr_to_str(tagObj[":@"], options); + const tempInd = tagName === "?xml" ? "" : indentation; + let piTextNodeName = tagObj[tagName][0][options.textNodeName]; + piTextNodeName = piTextNodeName.length !== 0 ? 
" " + piTextNodeName : ""; //remove extra spacing + xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`; + isPreviousElementTag = true; + continue; + } + let newIdentation = indentation; + if (newIdentation !== "") { + newIdentation += options.indentBy; + } + const attStr = attr_to_str(tagObj[":@"], options); + const tagStart = indentation + `<${tagName}${attStr}`; + const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation); + if (options.unpairedTags.indexOf(tagName) !== -1) { + if (options.suppressUnpairedNode) xmlStr += tagStart + ">"; + else xmlStr += tagStart + "/>"; + } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) { + xmlStr += tagStart + "/>"; + } else if (tagValue && tagValue.endsWith(">")) { + xmlStr += tagStart + `>${tagValue}${indentation}`; + } else { + xmlStr += tagStart + ">"; + if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("`; + } + isPreviousElementTag = true; + } + + return xmlStr; +} + +function propName(obj) { + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(!obj.hasOwnProperty(key)) continue; + if (key !== ":@") return key; + } +} + +function attr_to_str(attrMap, options) { + let attrStr = ""; + if (attrMap && !options.ignoreAttributes) { + for (let attr in attrMap) { + if(!attrMap.hasOwnProperty(attr)) continue; + let attrVal = options.attributeValueProcessor(attr, attrMap[attr]); + attrVal = replaceEntitiesValue(attrVal, options); + if (attrVal === true && options.suppressBooleanAttributes) { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`; + } else { + attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`; + } + } + } + return attrStr; +} + +function isStopNode(jPath, options) { + jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1); + let tagName = jPath.substr(jPath.lastIndexOf(".") + 1); + for (let index in options.stopNodes) { + if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." 
+ tagName) return true; + } + return false; +} + +function replaceEntitiesValue(textValue, options) { + if (textValue && textValue.length > 0 && options.processEntities) { + for (let i = 0; i < options.entities.length; i++) { + const entity = options.entities[i]; + textValue = textValue.replace(entity.regex, entity.val); + } + } + return textValue; +} +module.exports = toXml; + + +/***/ }), + +/***/ 151: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const util = __nccwpck_require__(7019); + +//TODO: handle comments +function readDocType(xmlData, i){ + + const entities = {}; + if( xmlData[i + 3] === 'O' && + xmlData[i + 4] === 'C' && + xmlData[i + 5] === 'T' && + xmlData[i + 6] === 'Y' && + xmlData[i + 7] === 'P' && + xmlData[i + 8] === 'E') + { + i = i+9; + let angleBracketsCount = 1; + let hasBody = false, comment = false; + let exp = ""; + for(;i') { //Read tag content + if(comment){ + if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){ + comment = false; + angleBracketsCount--; + } + }else{ + angleBracketsCount--; + } + if (angleBracketsCount === 0) { + break; + } + }else if( xmlData[i] === '['){ + hasBody = true; + }else{ + exp += xmlData[i]; + } + } + if(angleBracketsCount !== 0){ + throw new Error(`Unclosed DOCTYPE`); + } + }else{ + throw new Error(`Invalid Tag instead of DOCTYPE`); + } + return {entities, i}; +} + +function readEntityExp(xmlData,i){ + //External entities are not supported + // + + //Parameter entities are not supported + // + + //Internal entities are supported + // + + //read EntityName + let entityName = ""; + for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) { + // if(xmlData[i] === " ") continue; + // else + entityName += xmlData[i]; + } + entityName = entityName.trim(); + if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported"); + + //read Entity Value + const startChar = xmlData[i++]; + let val = "" + for (; i < xmlData.length && xmlData[i] !== startChar ; i++) { + val += xmlData[i]; + } + return [entityName, val, i]; +} + +function isComment(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === '-' && + xmlData[i+3] === '-') return true + return false +} +function isEntity(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'N' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'I' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'Y') return true + return false +} +function isElement(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'E' && + xmlData[i+3] === 'L' && + xmlData[i+4] === 'E' && + xmlData[i+5] === 'M' && + xmlData[i+6] === 'E' && + xmlData[i+7] === 'N' && + xmlData[i+8] === 'T') return true + return false +} + +function isAttlist(xmlData, i){ + if(xmlData[i+1] === '!' && + xmlData[i+2] === 'A' && + xmlData[i+3] === 'T' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'L' && + xmlData[i+6] === 'I' && + xmlData[i+7] === 'S' && + xmlData[i+8] === 'T') return true + return false +} +function isNotation(xmlData, i){ + if(xmlData[i+1] === '!' 
&& + xmlData[i+2] === 'N' && + xmlData[i+3] === 'O' && + xmlData[i+4] === 'T' && + xmlData[i+5] === 'A' && + xmlData[i+6] === 'T' && + xmlData[i+7] === 'I' && + xmlData[i+8] === 'O' && + xmlData[i+9] === 'N') return true + return false +} + +function validateEntityName(name){ + if (util.isName(name)) + return name; + else + throw new Error(`Invalid entity name ${name}`); +} + +module.exports = readDocType; + + +/***/ }), + +/***/ 4769: +/***/ ((__unused_webpack_module, exports) => { + + +const defaultOptions = { + preserveOrder: false, + attributeNamePrefix: '@_', + attributesGroupName: false, + textNodeName: '#text', + ignoreAttributes: true, + removeNSPrefix: false, // remove NS from tag name or attribute name if true + allowBooleanAttributes: false, //a tag can have attributes without any value + //ignoreRootElement : false, + parseTagValue: true, + parseAttributeValue: false, + trimValues: true, //Trim string values of tag and attributes + cdataPropName: false, + numberParseOptions: { + hex: true, + leadingZeros: true, + eNotation: true + }, + tagValueProcessor: function(tagName, val) { + return val; + }, + attributeValueProcessor: function(attrName, val) { + return val; + }, + stopNodes: [], //nested tags will not be parsed even for errors + alwaysCreateTextNode: false, + isArray: () => false, + commentPropName: false, + unpairedTags: [], + processEntities: true, + htmlEntities: false, + ignoreDeclaration: false, + ignorePiTags: false, + transformTagName: false, + transformAttributeName: false, + updateTag: function(tagName, jPath, attrs){ + return tagName + }, + // skipEmptyListItem: false +}; + +const buildOptions = function(options) { + return Object.assign({}, defaultOptions, options); +}; + +exports.buildOptions = buildOptions; +exports.defaultOptions = defaultOptions; + +/***/ }), + +/***/ 3017: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +///@ts-check + +const util = __nccwpck_require__(7019); +const xmlNode = __nccwpck_require__(9307); +const readDocType = __nccwpck_require__(151); +const toNumber = __nccwpck_require__(6496); + +// const regx = +// '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' +// .replace(/NAME/g, util.nameRegexp); + +//const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); +//const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); + +class OrderedObjParser{ + constructor(options){ + this.options = options; + this.currentNode = null; + this.tagsNodeStack = []; + this.docTypeEntities = {}; + this.lastEntities = { + "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, + "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, + "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, + "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, + }; + this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; + this.htmlEntities = { + "space": { regex: /&(nbsp|#160);/g, val: " " }, + // "lt" : { regex: /&(lt|#60);/g, val: "<" }, + // "gt" : { regex: /&(gt|#62);/g, val: ">" }, + // "amp" : { regex: /&(amp|#38);/g, val: "&" }, + // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, + // "apos" : { regex: /&(apos|#39);/g, val: "'" }, + "cent" : { regex: /&(cent|#162);/g, val: "¢" }, + "pound" : { regex: /&(pound|#163);/g, val: "£" }, + "yen" : { regex: /&(yen|#165);/g, val: "¥" }, + "euro" : { regex: /&(euro|#8364);/g, val: "€" }, + "copyright" : { regex: /&(copy|#169);/g, val: "©" }, + "reg" : { regex: 
/&(reg|#174);/g, val: "®" }, + "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, + "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 10)) }, + "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, + }; + this.addExternalEntities = addExternalEntities; + this.parseXml = parseXml; + this.parseTextData = parseTextData; + this.resolveNameSpace = resolveNameSpace; + this.buildAttributesMap = buildAttributesMap; + this.isItStopNode = isItStopNode; + this.replaceEntitiesValue = replaceEntitiesValue; + this.readStopNodeData = readStopNodeData; + this.saveTextToParentTag = saveTextToParentTag; + this.addChild = addChild; + } + +} + +function addExternalEntities(externalEntities){ + const entKeys = Object.keys(externalEntities); + for (let i = 0; i < entKeys.length; i++) { + const ent = entKeys[i]; + this.lastEntities[ent] = { + regex: new RegExp("&"+ent+";","g"), + val : externalEntities[ent] + } + } +} + +/** + * @param {string} val + * @param {string} tagName + * @param {string} jPath + * @param {boolean} dontTrim + * @param {boolean} hasAttributes + * @param {boolean} isLeafNode + * @param {boolean} escapeEntities + */ +function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { + if (val !== undefined) { + if (this.options.trimValues && !dontTrim) { + val = val.trim(); + } + if(val.length > 0){ + if(!escapeEntities) val = this.replaceEntitiesValue(val); + + const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); + if(newval === null || newval === undefined){ + //don't parse + return val; + }else if(typeof newval !== typeof val || newval !== val){ + //overwrite + return newval; + }else if(this.options.trimValues){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + const trimmedVal = val.trim(); + if(trimmedVal === val){ + return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); + }else{ + return val; + } + } + } + } +} + +function resolveNameSpace(tagname) { + if (this.options.removeNSPrefix) { + const tags = tagname.split(':'); + const prefix = tagname.charAt(0) === '/' ? 
'/' : ''; + if (tags[0] === 'xmlns') { + return ''; + } + if (tags.length === 2) { + tagname = prefix + tags[1]; + } + } + return tagname; +} + +//TODO: change regex to capture NS +//const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); +const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); + +function buildAttributesMap(attrStr, jPath, tagName) { + if (!this.options.ignoreAttributes && typeof attrStr === 'string') { + // attrStr = attrStr.replace(/\r?\n/g, ' '); + //attrStr = attrStr || attrStr.trim(); + + const matches = util.getAllMatches(attrStr, attrsRegx); + const len = matches.length; //don't make it inline + const attrs = {}; + for (let i = 0; i < len; i++) { + const attrName = this.resolveNameSpace(matches[i][1]); + let oldVal = matches[i][4]; + let aName = this.options.attributeNamePrefix + attrName; + if (attrName.length) { + if (this.options.transformAttributeName) { + aName = this.options.transformAttributeName(aName); + } + if(aName === "__proto__") aName = "#__proto__"; + if (oldVal !== undefined) { + if (this.options.trimValues) { + oldVal = oldVal.trim(); + } + oldVal = this.replaceEntitiesValue(oldVal); + const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); + if(newVal === null || newVal === undefined){ + //don't parse + attrs[aName] = oldVal; + }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ + //overwrite + attrs[aName] = newVal; + }else{ + //parse + attrs[aName] = parseValue( + oldVal, + this.options.parseAttributeValue, + this.options.numberParseOptions + ); + } + } else if (this.options.allowBooleanAttributes) { + attrs[aName] = true; + } + } + } + if (!Object.keys(attrs).length) { + return; + } + if (this.options.attributesGroupName) { + const attrCollection = {}; + attrCollection[this.options.attributesGroupName] = attrs; + return attrCollection; + } + return attrs + } +} + +const parseXml = function(xmlData) { + xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line + const xmlObj = new xmlNode('!xml'); + let currentNode = xmlObj; + let textData = ""; + let jPath = ""; + for(let i=0; i< xmlData.length; i++){//for each char in XML data + const ch = xmlData[i]; + if(ch === '<'){ + // const nextIndex = i+1; + // const _2ndChar = xmlData[nextIndex]; + if( xmlData[i+1] === '/') {//Closing Tag + const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") + let tagName = xmlData.substring(i+2,closeIndex).trim(); + + if(this.options.removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + } + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + if(currentNode){ + textData = this.saveTextToParentTag(textData, currentNode, jPath); + } + + //check if last tag of nested tag was unpaired tag + const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); + if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ + throw new Error(`Unpaired tag can not be used as closing tag: `); + } + let propIndex = 0 + if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ + propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) + this.tagsNodeStack.pop(); + }else{ + propIndex = jPath.lastIndexOf("."); + } + jPath = jPath.substring(0, propIndex); + + currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope + textData = ""; + i = closeIndex; + } else if( xmlData[i+1] === '?') { 
+ + let tagData = readTagExp(xmlData,i, false, "?>"); + if(!tagData) throw new Error("Pi Tag is not closed."); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ + + }else{ + + const childNode = new xmlNode(tagData.tagName); + childNode.add(this.options.textNodeName, ""); + + if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); + } + this.addChild(currentNode, childNode, jPath) + + } + + + i = tagData.closeIndex + 1; + } else if(xmlData.substr(i + 1, 3) === '!--') { + const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") + if(this.options.commentPropName){ + const comment = xmlData.substring(i + 4, endIndex - 2); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); + } + i = endIndex; + } else if( xmlData.substr(i + 1, 2) === '!D') { + const result = readDocType(xmlData, i); + this.docTypeEntities = result.entities; + i = result.i; + }else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; + const tagExp = xmlData.substring(i + 9,closeIndex); + + textData = this.saveTextToParentTag(textData, currentNode, jPath); + + let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); + if(val == undefined) val = ""; + + //cdata should be set even if it is 0 length string + if(this.options.cdataPropName){ + currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); + }else{ + currentNode.add(this.options.textNodeName, val); + } + + i = closeIndex + 2; + }else {//Opening tag + let result = readTagExp(xmlData,i, this.options.removeNSPrefix); + let tagName= result.tagName; + const rawTagName = result.rawTagName; + let tagExp = result.tagExp; + let attrExpPresent = result.attrExpPresent; + let closeIndex = result.closeIndex; + + if (this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + //save text as child node + if (currentNode && textData) { + if(currentNode.tagname !== '!xml'){ + //when nested tag is found + textData = this.saveTextToParentTag(textData, currentNode, jPath, false); + } + } + + //check if last tag was unpaired tag + const lastTag = currentNode; + if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ + currentNode = this.tagsNodeStack.pop(); + jPath = jPath.substring(0, jPath.lastIndexOf(".")); + } + if(tagName !== xmlObj.tagname){ + jPath += jPath ? "." 
+ tagName : tagName; + } + if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { + let tagContent = ""; + //self-closing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + i = result.closeIndex; + } + //unpaired tag + else if(this.options.unpairedTags.indexOf(tagName) !== -1){ + + i = result.closeIndex; + } + //normal tag + else{ + //read until closing tag is found + const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); + if(!result) throw new Error(`Unexpected end of ${rawTagName}`); + i = result.i; + tagContent = result.tagContent; + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + if(tagContent) { + tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); + } + + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + childNode.add(this.options.textNodeName, tagContent); + + this.addChild(currentNode, childNode, jPath) + }else{ + //selfClosing tag + if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ + if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' + tagName = tagName.substr(0, tagName.length - 1); + jPath = jPath.substr(0, jPath.length - 1); + tagExp = tagName; + }else{ + tagExp = tagExp.substr(0, tagExp.length - 1); + } + + if(this.options.transformTagName) { + tagName = this.options.transformTagName(tagName); + } + + const childNode = new xmlNode(tagName); + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + jPath = jPath.substr(0, jPath.lastIndexOf(".")); + } + //opening tag + else{ + const childNode = new xmlNode( tagName); + this.tagsNodeStack.push(currentNode); + + if(tagName !== tagExp && attrExpPresent){ + childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); + } + this.addChild(currentNode, childNode, jPath) + currentNode = childNode; + } + textData = ""; + i = closeIndex; + } + } + }else{ + textData += xmlData[i]; + } + } + return xmlObj.child; +} + +function addChild(currentNode, childNode, jPath){ + const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) + if(result === false){ + }else if(typeof result === "string"){ + childNode.tagname = result + currentNode.addChild(childNode); + }else{ + currentNode.addChild(childNode); + } +} + +const replaceEntitiesValue = function(val){ + + if(this.options.processEntities){ + for(let entityName in this.docTypeEntities){ + const entity = this.docTypeEntities[entityName]; + val = val.replace( entity.regx, entity.val); + } + for(let entityName in this.lastEntities){ + const entity = this.lastEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + if(this.options.htmlEntities){ + for(let entityName in this.htmlEntities){ + const entity = this.htmlEntities[entityName]; + val = val.replace( entity.regex, entity.val); + } + } + val = val.replace( this.ampEntity.regex, this.ampEntity.val); + } + return val; +} +function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { + if (textData) { //store previously collected data as textNode + if(isLeafNode === undefined) isLeafNode = 
Object.keys(currentNode.child).length === 0 + + textData = this.parseTextData(textData, + currentNode.tagname, + jPath, + false, + currentNode[":@"] ? Object.keys(currentNode[":@"]).length !== 0 : false, + isLeafNode); + + if (textData !== undefined && textData !== "") + currentNode.add(this.options.textNodeName, textData); + textData = ""; + } + return textData; +} + +//TODO: use jPath to simplify the logic +/** + * + * @param {string[]} stopNodes + * @param {string} jPath + * @param {string} currentTagName + */ +function isItStopNode(stopNodes, jPath, currentTagName){ + const allNodesExp = "*." + currentTagName; + for (const stopNodePath in stopNodes) { + const stopNodeExp = stopNodes[stopNodePath]; + if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; + } + return false; +} + +/** + * Returns the tag Expression and where it is ending handling single-double quotes situation + * @param {string} xmlData + * @param {number} i starting index + * @returns + */ +function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ + let attrBoundary; + let tagExp = ""; + for (let index = i; index < xmlData.length; index++) { + let ch = xmlData[index]; + if (attrBoundary) { + if (ch === attrBoundary) attrBoundary = "";//reset + } else if (ch === '"' || ch === "'") { + attrBoundary = ch; + } else if (ch === closingChar[0]) { + if(closingChar[1]){ + if(xmlData[index + 1] === closingChar[1]){ + return { + data: tagExp, + index: index + } + } + }else{ + return { + data: tagExp, + index: index + } + } + } else if (ch === '\t') { + ch = " " + } + tagExp += ch; + } +} + +function findClosingIndex(xmlData, str, i, errMsg){ + const closingIndex = xmlData.indexOf(str, i); + if(closingIndex === -1){ + throw new Error(errMsg) + }else{ + return closingIndex + str.length - 1; + } +} + +function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ + const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); + if(!result) return; + let tagExp = result.data; + const closeIndex = result.index; + const separatorIndex = tagExp.search(/\s/); + let tagName = tagExp; + let attrExpPresent = true; + if(separatorIndex !== -1){//separate tag name and attributes expression + tagName = tagExp.substring(0, separatorIndex); + tagExp = tagExp.substring(separatorIndex + 1).trimStart(); + } + + const rawTagName = tagName; + if(removeNSPrefix){ + const colonIndex = tagName.indexOf(":"); + if(colonIndex !== -1){ + tagName = tagName.substr(colonIndex+1); + attrExpPresent = tagName !== result.data.substr(colonIndex + 1); + } + } + + return { + tagName: tagName, + tagExp: tagExp, + closeIndex: closeIndex, + attrExpPresent: attrExpPresent, + rawTagName: rawTagName, + } +} +/** + * find paired tag for a stop node + * @param {string} xmlData + * @param {string} tagName + * @param {number} i + */ +function readStopNodeData(xmlData, tagName, i){ + const startIndex = i; + // Starting at 1 since we already have an open tag + let openTagCount = 1; + + for (; i < xmlData.length; i++) { + if( xmlData[i] === "<"){ + if (xmlData[i+1] === "/") {//close tag + const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); + let closeTagName = xmlData.substring(i+2,closeIndex).trim(); + if(closeTagName === tagName){ + openTagCount--; + if (openTagCount === 0) { + return { + tagContent: xmlData.substring(startIndex, i), + i : closeIndex + } + } + } + i=closeIndex; + } else if(xmlData[i+1] === '?') { + const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") + i=closeIndex; + } 
else if(xmlData.substr(i + 1, 3) === '!--') { + const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") + i=closeIndex; + } else if(xmlData.substr(i + 1, 2) === '![') { + const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; + i=closeIndex; + } else { + const tagData = readTagExp(xmlData, i, '>') + + if (tagData) { + const openTagName = tagData && tagData.tagName; + if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { + openTagCount++; + } + i=tagData.closeIndex; + } + } + } + }//end for loop +} + +function parseValue(val, shouldParse, options) { + if (shouldParse && typeof val === 'string') { + //console.log(options) + const newval = val.trim(); + if(newval === 'true' ) return true; + else if(newval === 'false' ) return false; + else return toNumber(val, options); + } else { + if (util.isExist(val)) { + return val; + } else { + return ''; + } + } +} + + +module.exports = OrderedObjParser; + + +/***/ }), + +/***/ 9844: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { buildOptions} = __nccwpck_require__(4769); +const OrderedObjParser = __nccwpck_require__(3017); +const { prettify} = __nccwpck_require__(7594); +const validator = __nccwpck_require__(9433); + +class XMLParser{ + + constructor(options){ + this.externalEntities = {}; + this.options = buildOptions(options); + + } + /** + * Parse XML dats to JS object + * @param {string|Buffer} xmlData + * @param {boolean|Object} validationOption + */ + parse(xmlData,validationOption){ + if(typeof xmlData === "string"){ + }else if( xmlData.toString){ + xmlData = xmlData.toString(); + }else{ + throw new Error("XML data is accepted in String or Bytes[] form.") + } + if( validationOption){ + if(validationOption === true) validationOption = {}; //validate with default options + + const result = validator.validate(xmlData, validationOption); + if (result !== true) { + throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) + } + } + const orderedObjParser = new OrderedObjParser(this.options); + orderedObjParser.addExternalEntities(this.externalEntities); + const orderedResult = orderedObjParser.parseXml(xmlData); + if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; + else return prettify(orderedResult, this.options); + } + + /** + * Add Entity which is not by default supported by this library + * @param {string} key + * @param {string} value + */ + addEntity(key, value){ + if(value.indexOf("&") !== -1){ + throw new Error("Entity value can't have '&'") + }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ + throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for ' '") + }else if(value === "&"){ + throw new Error("An entity with value '&' is not permitted"); + }else{ + this.externalEntities[key] = value; + } + } +} + +module.exports = XMLParser; + +/***/ }), + +/***/ 7594: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * + * @param {array} node + * @param {any} options + * @returns + */ +function prettify(node, options){ + return compress( node, options); +} + +/** + * + * @param {array} arr + * @param {object} options + * @param {string} jPath + * @returns object + */ +function compress(arr, options, jPath){ + let text; + const compressedObj = {}; + for (let i = 0; i < arr.length; i++) { + const tagObj = arr[i]; + const property = propName(tagObj); + let newJpath = ""; + if(jPath === undefined) newJpath = property; + else newJpath = jPath + "." + property; + + if(property === options.textNodeName){ + if(text === undefined) text = tagObj[property]; + else text += "" + tagObj[property]; + }else if(property === undefined){ + continue; + }else if(tagObj[property]){ + + let val = compress(tagObj[property], options, newJpath); + const isLeaf = isLeafTag(val, options); + + if(tagObj[":@"]){ + assignAttributes( val, tagObj[":@"], newJpath, options); + }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ + val = val[options.textNodeName]; + }else if(Object.keys(val).length === 0){ + if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; + else val = ""; + } + + if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { + if(!Array.isArray(compressedObj[property])) { + compressedObj[property] = [ compressedObj[property] ]; + } + compressedObj[property].push(val); + }else{ + //TODO: if a node is not an array, then check if it should be an array + //also determine if it is a leaf node + if (options.isArray(property, newJpath, isLeaf )) { + compressedObj[property] = [val]; + }else{ + compressedObj[property] = val; + } + } + } + + } + // if(text && text.length > 0) compressedObj[options.textNodeName] = text; + if(typeof text === "string"){ + if(text.length > 0) compressedObj[options.textNodeName] = text; + }else if(text !== undefined) compressedObj[options.textNodeName] = text; + return compressedObj; +} + +function propName(obj){ + const keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + const key = keys[i]; + if(key !== ":@") return key; + } +} + +function assignAttributes(obj, attrMap, jpath, options){ + if (attrMap) { + const keys = Object.keys(attrMap); + const len = keys.length; //don't make it inline + for (let i = 0; i < len; i++) { + const atrrName = keys[i]; + if (options.isArray(atrrName, jpath + "." 
+ atrrName, true, true)) { + obj[atrrName] = [ attrMap[atrrName] ]; + } else { + obj[atrrName] = attrMap[atrrName]; + } + } + } +} + +function isLeafTag(obj, options){ + const { textNodeName } = options; + const propCount = Object.keys(obj).length; + + if (propCount === 0) { + return true; + } + + if ( + propCount === 1 && + (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) + ) { + return true; + } + + return false; +} +exports.prettify = prettify; + + +/***/ }), + +/***/ 9307: +/***/ ((module) => { + +"use strict"; + + +class XmlNode{ + constructor(tagname) { + this.tagname = tagname; + this.child = []; //nested tags, text, cdata, comments in order + this[":@"] = {}; //attributes map + } + add(key,val){ + // this.child.push( {name : key, val: val, isCdata: isCdata }); + if(key === "__proto__") key = "#__proto__"; + this.child.push( {[key]: val }); + } + addChild(node) { + if(node.tagname === "__proto__") node.tagname = "#__proto__"; + if(node[":@"] && Object.keys(node[":@"]).length > 0){ + this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); + }else{ + this.child.push( { [node.tagname]: node.child }); + } + }; +}; + + +module.exports = XmlNode; + +/***/ }), + +/***/ 1622: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(230); + + +/***/ }), + +/***/ 230: +/***/ (function(module) { + +// Generated by CoffeeScript 1.8.0 +(function() { + var Heap, defaultCmp, floor, heapify, heappop, heappush, heappushpop, heapreplace, insort, min, nlargest, nsmallest, updateItem, _siftdown, _siftup; + + floor = Math.floor, min = Math.min; + + + /* + Default comparison function to be used + */ + + defaultCmp = function(x, y) { + if (x < y) { + return -1; + } + if (x > y) { + return 1; + } + return 0; + }; + + + /* + Insert item x in list a, and keep it sorted assuming a is sorted. + + If x is already in a, insert it to the right of the rightmost x. + + Optional args lo (default 0) and hi (default a.length) bound the slice + of a to be searched. + */ + + insort = function(a, x, lo, hi, cmp) { + var mid; + if (lo == null) { + lo = 0; + } + if (cmp == null) { + cmp = defaultCmp; + } + if (lo < 0) { + throw new Error('lo must be non-negative'); + } + if (hi == null) { + hi = a.length; + } + while (lo < hi) { + mid = floor((lo + hi) / 2); + if (cmp(x, a[mid]) < 0) { + hi = mid; + } else { + lo = mid + 1; + } + } + return ([].splice.apply(a, [lo, lo - lo].concat(x)), x); + }; + + + /* + Push item onto heap, maintaining the heap invariant. + */ + + heappush = function(array, item, cmp) { + if (cmp == null) { + cmp = defaultCmp; + } + array.push(item); + return _siftdown(array, 0, array.length - 1, cmp); + }; + + + /* + Pop the smallest item off the heap, maintaining the heap invariant. + */ + + heappop = function(array, cmp) { + var lastelt, returnitem; + if (cmp == null) { + cmp = defaultCmp; + } + lastelt = array.pop(); + if (array.length) { + returnitem = array[0]; + array[0] = lastelt; + _siftup(array, 0, cmp); + } else { + returnitem = lastelt; + } + return returnitem; + }; + + + /* + Pop and return the current smallest value, and add the new item. + + This is more efficient than heappop() followed by heappush(), and can be + more appropriate when using a fixed size heap. Note that the value + returned may be larger than item! 
That constrains reasonable use of + this routine unless written as part of a conditional replacement: + if item > array[0] + item = heapreplace(array, item) + */ + + heapreplace = function(array, item, cmp) { + var returnitem; + if (cmp == null) { + cmp = defaultCmp; + } + returnitem = array[0]; + array[0] = item; + _siftup(array, 0, cmp); + return returnitem; + }; + + + /* + Fast version of a heappush followed by a heappop. + */ + + heappushpop = function(array, item, cmp) { + var _ref; + if (cmp == null) { + cmp = defaultCmp; + } + if (array.length && cmp(array[0], item) < 0) { + _ref = [array[0], item], item = _ref[0], array[0] = _ref[1]; + _siftup(array, 0, cmp); + } + return item; + }; + + + /* + Transform list into a heap, in-place, in O(array.length) time. + */ + + heapify = function(array, cmp) { + var i, _i, _j, _len, _ref, _ref1, _results, _results1; + if (cmp == null) { + cmp = defaultCmp; + } + _ref1 = (function() { + _results1 = []; + for (var _j = 0, _ref = floor(array.length / 2); 0 <= _ref ? _j < _ref : _j > _ref; 0 <= _ref ? _j++ : _j--){ _results1.push(_j); } + return _results1; + }).apply(this).reverse(); + _results = []; + for (_i = 0, _len = _ref1.length; _i < _len; _i++) { + i = _ref1[_i]; + _results.push(_siftup(array, i, cmp)); + } + return _results; + }; + + + /* + Update the position of the given item in the heap. + This function should be called every time the item is being modified. + */ + + updateItem = function(array, item, cmp) { + var pos; + if (cmp == null) { + cmp = defaultCmp; + } + pos = array.indexOf(item); + if (pos === -1) { + return; + } + _siftdown(array, 0, pos, cmp); + return _siftup(array, pos, cmp); + }; + + + /* + Find the n largest elements in a dataset. + */ + + nlargest = function(array, n, cmp) { + var elem, result, _i, _len, _ref; + if (cmp == null) { + cmp = defaultCmp; + } + result = array.slice(0, n); + if (!result.length) { + return result; + } + heapify(result, cmp); + _ref = array.slice(n); + for (_i = 0, _len = _ref.length; _i < _len; _i++) { + elem = _ref[_i]; + heappushpop(result, elem, cmp); + } + return result.sort(cmp).reverse(); + }; + + + /* + Find the n smallest elements in a dataset. + */ + + nsmallest = function(array, n, cmp) { + var elem, i, los, result, _i, _j, _len, _ref, _ref1, _results; + if (cmp == null) { + cmp = defaultCmp; + } + if (n * 10 <= array.length) { + result = array.slice(0, n).sort(cmp); + if (!result.length) { + return result; + } + los = result[result.length - 1]; + _ref = array.slice(n); + for (_i = 0, _len = _ref.length; _i < _len; _i++) { + elem = _ref[_i]; + if (cmp(elem, los) < 0) { + insort(result, elem, 0, null, cmp); + result.pop(); + los = result[result.length - 1]; + } + } + return result; + } + heapify(array, cmp); + _results = []; + for (i = _j = 0, _ref1 = min(n, array.length); 0 <= _ref1 ? _j < _ref1 : _j > _ref1; i = 0 <= _ref1 ? 
++_j : --_j) { + _results.push(heappop(array, cmp)); + } + return _results; + }; + + _siftdown = function(array, startpos, pos, cmp) { + var newitem, parent, parentpos; + if (cmp == null) { + cmp = defaultCmp; + } + newitem = array[pos]; + while (pos > startpos) { + parentpos = (pos - 1) >> 1; + parent = array[parentpos]; + if (cmp(newitem, parent) < 0) { + array[pos] = parent; + pos = parentpos; + continue; + } + break; + } + return array[pos] = newitem; + }; + + _siftup = function(array, pos, cmp) { + var childpos, endpos, newitem, rightpos, startpos; + if (cmp == null) { + cmp = defaultCmp; + } + endpos = array.length; + startpos = pos; + newitem = array[pos]; + childpos = 2 * pos + 1; + while (childpos < endpos) { + rightpos = childpos + 1; + if (rightpos < endpos && !(cmp(array[childpos], array[rightpos]) < 0)) { + childpos = rightpos; + } + array[pos] = array[childpos]; + pos = childpos; + childpos = 2 * pos + 1; + } + array[pos] = newitem; + return _siftdown(array, startpos, pos, cmp); + }; + + Heap = (function() { + Heap.push = heappush; + + Heap.pop = heappop; + + Heap.replace = heapreplace; + + Heap.pushpop = heappushpop; + + Heap.heapify = heapify; + + Heap.updateItem = updateItem; + + Heap.nlargest = nlargest; + + Heap.nsmallest = nsmallest; + + function Heap(cmp) { + this.cmp = cmp != null ? cmp : defaultCmp; + this.nodes = []; + } + + Heap.prototype.push = function(x) { + return heappush(this.nodes, x, this.cmp); + }; + + Heap.prototype.pop = function() { + return heappop(this.nodes, this.cmp); + }; + + Heap.prototype.peek = function() { + return this.nodes[0]; + }; + + Heap.prototype.contains = function(x) { + return this.nodes.indexOf(x) !== -1; + }; + + Heap.prototype.replace = function(x) { + return heapreplace(this.nodes, x, this.cmp); + }; + + Heap.prototype.pushpop = function(x) { + return heappushpop(this.nodes, x, this.cmp); + }; + + Heap.prototype.heapify = function() { + return heapify(this.nodes, this.cmp); + }; + + Heap.prototype.updateItem = function(x) { + return updateItem(this.nodes, x, this.cmp); + }; + + Heap.prototype.clear = function() { + return this.nodes = []; + }; + + Heap.prototype.empty = function() { + return this.nodes.length === 0; + }; + + Heap.prototype.size = function() { + return this.nodes.length; + }; + + Heap.prototype.clone = function() { + var heap; + heap = new Heap(); + heap.nodes = this.nodes.slice(0); + return heap; + }; + + Heap.prototype.toArray = function() { + return this.nodes.slice(0); + }; + + Heap.prototype.insert = Heap.prototype.push; + + Heap.prototype.top = Heap.prototype.peek; + + Heap.prototype.front = Heap.prototype.peek; + + Heap.prototype.has = Heap.prototype.contains; + + Heap.prototype.copy = Heap.prototype.clone; + + return Heap; + + })(); + + (function(root, factory) { + if (typeof define === 'function' && define.amd) { + return define([], factory); + } else if (true) { + return module.exports = factory(); + } else {} + })(this, function() { + return Heap; + }); + +}).call(this); + + +/***/ }), + +/***/ 4281: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + + +var loader = __nccwpck_require__(1950); +var dumper = __nccwpck_require__(9980); + + +function renamed(from, to) { + return function () { + throw new Error('Function yaml.' + from + ' is removed in js-yaml 4. ' + + 'Use yaml.' 
+ to + ' instead, which is now safe by default.'); + }; +} + + +module.exports.Type = __nccwpck_require__(9557); +module.exports.Schema = __nccwpck_require__(2046); +module.exports.FAILSAFE_SCHEMA = __nccwpck_require__(9832); +module.exports.JSON_SCHEMA = __nccwpck_require__(8927); +module.exports.CORE_SCHEMA = __nccwpck_require__(5746); +module.exports.DEFAULT_SCHEMA = __nccwpck_require__(7336); +module.exports.load = loader.load; +module.exports.loadAll = loader.loadAll; +module.exports.dump = dumper.dump; +module.exports.YAMLException = __nccwpck_require__(1248); + +// Re-export all types in case user wants to create custom schema +module.exports.types = { + binary: __nccwpck_require__(8149), + float: __nccwpck_require__(7584), + map: __nccwpck_require__(7316), + null: __nccwpck_require__(4333), + pairs: __nccwpck_require__(6267), + set: __nccwpck_require__(8758), + timestamp: __nccwpck_require__(8966), + bool: __nccwpck_require__(7296), + int: __nccwpck_require__(4652), + merge: __nccwpck_require__(6854), + omap: __nccwpck_require__(8649), + seq: __nccwpck_require__(7161), + str: __nccwpck_require__(3929) +}; + +// Removed functions from JS-YAML 3.0.x +module.exports.safeLoad = renamed('safeLoad', 'load'); +module.exports.safeLoadAll = renamed('safeLoadAll', 'loadAll'); +module.exports.safeDump = renamed('safeDump', 'dump'); + + +/***/ }), + +/***/ 9816: +/***/ ((module) => { + +"use strict"; + + + +function isNothing(subject) { + return (typeof subject === 'undefined') || (subject === null); +} + + +function isObject(subject) { + return (typeof subject === 'object') && (subject !== null); +} + + +function toArray(sequence) { + if (Array.isArray(sequence)) return sequence; + else if (isNothing(sequence)) return []; + + return [ sequence ]; +} + + +function extend(target, source) { + var index, length, key, sourceKeys; + + if (source) { + sourceKeys = Object.keys(source); + + for (index = 0, length = sourceKeys.length; index < length; index += 1) { + key = sourceKeys[index]; + target[key] = source[key]; + } + } + + return target; +} + + +function repeat(string, count) { + var result = '', cycle; + + for (cycle = 0; cycle < count; cycle += 1) { + result += string; + } + + return result; +} + + +function isNegativeZero(number) { + return (number === 0) && (Number.NEGATIVE_INFINITY === 1 / number); +} + + +module.exports.isNothing = isNothing; +module.exports.isObject = isObject; +module.exports.toArray = toArray; +module.exports.repeat = repeat; +module.exports.isNegativeZero = isNegativeZero; +module.exports.extend = extend; + + +/***/ }), + +/***/ 9980: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/*eslint-disable no-use-before-define*/ + +var common = __nccwpck_require__(9816); +var YAMLException = __nccwpck_require__(1248); +var DEFAULT_SCHEMA = __nccwpck_require__(7336); + +var _toString = Object.prototype.toString; +var _hasOwnProperty = Object.prototype.hasOwnProperty; + +var CHAR_BOM = 0xFEFF; +var CHAR_TAB = 0x09; /* Tab */ +var CHAR_LINE_FEED = 0x0A; /* LF */ +var CHAR_CARRIAGE_RETURN = 0x0D; /* CR */ +var CHAR_SPACE = 0x20; /* Space */ +var CHAR_EXCLAMATION = 0x21; /* ! 
*/ +var CHAR_DOUBLE_QUOTE = 0x22; /* " */ +var CHAR_SHARP = 0x23; /* # */ +var CHAR_PERCENT = 0x25; /* % */ +var CHAR_AMPERSAND = 0x26; /* & */ +var CHAR_SINGLE_QUOTE = 0x27; /* ' */ +var CHAR_ASTERISK = 0x2A; /* * */ +var CHAR_COMMA = 0x2C; /* , */ +var CHAR_MINUS = 0x2D; /* - */ +var CHAR_COLON = 0x3A; /* : */ +var CHAR_EQUALS = 0x3D; /* = */ +var CHAR_GREATER_THAN = 0x3E; /* > */ +var CHAR_QUESTION = 0x3F; /* ? */ +var CHAR_COMMERCIAL_AT = 0x40; /* @ */ +var CHAR_LEFT_SQUARE_BRACKET = 0x5B; /* [ */ +var CHAR_RIGHT_SQUARE_BRACKET = 0x5D; /* ] */ +var CHAR_GRAVE_ACCENT = 0x60; /* ` */ +var CHAR_LEFT_CURLY_BRACKET = 0x7B; /* { */ +var CHAR_VERTICAL_LINE = 0x7C; /* | */ +var CHAR_RIGHT_CURLY_BRACKET = 0x7D; /* } */ + +var ESCAPE_SEQUENCES = {}; + +ESCAPE_SEQUENCES[0x00] = '\\0'; +ESCAPE_SEQUENCES[0x07] = '\\a'; +ESCAPE_SEQUENCES[0x08] = '\\b'; +ESCAPE_SEQUENCES[0x09] = '\\t'; +ESCAPE_SEQUENCES[0x0A] = '\\n'; +ESCAPE_SEQUENCES[0x0B] = '\\v'; +ESCAPE_SEQUENCES[0x0C] = '\\f'; +ESCAPE_SEQUENCES[0x0D] = '\\r'; +ESCAPE_SEQUENCES[0x1B] = '\\e'; +ESCAPE_SEQUENCES[0x22] = '\\"'; +ESCAPE_SEQUENCES[0x5C] = '\\\\'; +ESCAPE_SEQUENCES[0x85] = '\\N'; +ESCAPE_SEQUENCES[0xA0] = '\\_'; +ESCAPE_SEQUENCES[0x2028] = '\\L'; +ESCAPE_SEQUENCES[0x2029] = '\\P'; + +var DEPRECATED_BOOLEANS_SYNTAX = [ + 'y', 'Y', 'yes', 'Yes', 'YES', 'on', 'On', 'ON', + 'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF' +]; + +var DEPRECATED_BASE60_SYNTAX = /^[-+]?[0-9_]+(?::[0-9_]+)+(?:\.[0-9_]*)?$/; + +function compileStyleMap(schema, map) { + var result, keys, index, length, tag, style, type; + + if (map === null) return {}; + + result = {}; + keys = Object.keys(map); + + for (index = 0, length = keys.length; index < length; index += 1) { + tag = keys[index]; + style = String(map[tag]); + + if (tag.slice(0, 2) === '!!') { + tag = 'tag:yaml.org,2002:' + tag.slice(2); + } + type = schema.compiledTypeMap['fallback'][tag]; + + if (type && _hasOwnProperty.call(type.styleAliases, style)) { + style = type.styleAliases[style]; + } + + result[tag] = style; + } + + return result; +} + +function encodeHex(character) { + var string, handle, length; + + string = character.toString(16).toUpperCase(); + + if (character <= 0xFF) { + handle = 'x'; + length = 2; + } else if (character <= 0xFFFF) { + handle = 'u'; + length = 4; + } else if (character <= 0xFFFFFFFF) { + handle = 'U'; + length = 8; + } else { + throw new YAMLException('code point within a string may not be greater than 0xFFFFFFFF'); + } + + return '\\' + handle + common.repeat('0', length - string.length) + string; +} + + +var QUOTING_TYPE_SINGLE = 1, + QUOTING_TYPE_DOUBLE = 2; + +function State(options) { + this.schema = options['schema'] || DEFAULT_SCHEMA; + this.indent = Math.max(1, (options['indent'] || 2)); + this.noArrayIndent = options['noArrayIndent'] || false; + this.skipInvalid = options['skipInvalid'] || false; + this.flowLevel = (common.isNothing(options['flowLevel']) ? -1 : options['flowLevel']); + this.styleMap = compileStyleMap(this.schema, options['styles'] || null); + this.sortKeys = options['sortKeys'] || false; + this.lineWidth = options['lineWidth'] || 80; + this.noRefs = options['noRefs'] || false; + this.noCompatMode = options['noCompatMode'] || false; + this.condenseFlow = options['condenseFlow'] || false; + this.quotingType = options['quotingType'] === '"' ? QUOTING_TYPE_DOUBLE : QUOTING_TYPE_SINGLE; + this.forceQuotes = options['forceQuotes'] || false; + this.replacer = typeof options['replacer'] === 'function' ? 
options['replacer'] : null; + + this.implicitTypes = this.schema.compiledImplicit; + this.explicitTypes = this.schema.compiledExplicit; + + this.tag = null; + this.result = ''; + + this.duplicates = []; + this.usedDuplicates = null; +} + +// Indents every line in a string. Empty lines (\n only) are not indented. +function indentString(string, spaces) { + var ind = common.repeat(' ', spaces), + position = 0, + next = -1, + result = '', + line, + length = string.length; + + while (position < length) { + next = string.indexOf('\n', position); + if (next === -1) { + line = string.slice(position); + position = length; + } else { + line = string.slice(position, next + 1); + position = next + 1; + } + + if (line.length && line !== '\n') result += ind; + + result += line; + } + + return result; +} + +function generateNextLine(state, level) { + return '\n' + common.repeat(' ', state.indent * level); +} + +function testImplicitResolving(state, str) { + var index, length, type; + + for (index = 0, length = state.implicitTypes.length; index < length; index += 1) { + type = state.implicitTypes[index]; + + if (type.resolve(str)) { + return true; + } + } + + return false; +} + +// [33] s-white ::= s-space | s-tab +function isWhitespace(c) { + return c === CHAR_SPACE || c === CHAR_TAB; +} + +// Returns true if the character can be printed without escaping. +// From YAML 1.2: "any allowed characters known to be non-printable +// should also be escaped. [However,] This isn’t mandatory" +// Derived from nb-char - \t - #x85 - #xA0 - #x2028 - #x2029. +function isPrintable(c) { + return (0x00020 <= c && c <= 0x00007E) + || ((0x000A1 <= c && c <= 0x00D7FF) && c !== 0x2028 && c !== 0x2029) + || ((0x0E000 <= c && c <= 0x00FFFD) && c !== CHAR_BOM) + || (0x10000 <= c && c <= 0x10FFFF); +} + +// [34] ns-char ::= nb-char - s-white +// [27] nb-char ::= c-printable - b-char - c-byte-order-mark +// [26] b-char ::= b-line-feed | b-carriage-return +// Including s-white (for some reason, examples doesn't match specs in this aspect) +// ns-char ::= c-printable - b-line-feed - b-carriage-return - c-byte-order-mark +function isNsCharOrWhitespace(c) { + return isPrintable(c) + && c !== CHAR_BOM + // - b-char + && c !== CHAR_CARRIAGE_RETURN + && c !== CHAR_LINE_FEED; +} + +// [127] ns-plain-safe(c) ::= c = flow-out ⇒ ns-plain-safe-out +// c = flow-in ⇒ ns-plain-safe-in +// c = block-key ⇒ ns-plain-safe-out +// c = flow-key ⇒ ns-plain-safe-in +// [128] ns-plain-safe-out ::= ns-char +// [129] ns-plain-safe-in ::= ns-char - c-flow-indicator +// [130] ns-plain-char(c) ::= ( ns-plain-safe(c) - “:” - “#” ) +// | ( /* An ns-char preceding */ “#” ) +// | ( “:” /* Followed by an ns-plain-safe(c) */ ) +function isPlainSafe(c, prev, inblock) { + var cIsNsCharOrWhitespace = isNsCharOrWhitespace(c); + var cIsNsChar = cIsNsCharOrWhitespace && !isWhitespace(c); + return ( + // ns-plain-safe + inblock ? // c = flow-in + cIsNsCharOrWhitespace + : cIsNsCharOrWhitespace + // - c-flow-indicator + && c !== CHAR_COMMA + && c !== CHAR_LEFT_SQUARE_BRACKET + && c !== CHAR_RIGHT_SQUARE_BRACKET + && c !== CHAR_LEFT_CURLY_BRACKET + && c !== CHAR_RIGHT_CURLY_BRACKET + ) + // ns-plain-char + && c !== CHAR_SHARP // false on '#' + && !(prev === CHAR_COLON && !cIsNsChar) // false on ': ' + || (isNsCharOrWhitespace(prev) && !isWhitespace(prev) && c === CHAR_SHARP) // change to true on '[^ ]#' + || (prev === CHAR_COLON && cIsNsChar); // change to true on ':[^ ]' +} + +// Simplified test for values allowed as the first character in plain style. 
+function isPlainSafeFirst(c) { + // Uses a subset of ns-char - c-indicator + // where ns-char = nb-char - s-white. + // No support of ( ( “?” | “:” | “-” ) /* Followed by an ns-plain-safe(c)) */ ) part + return isPrintable(c) && c !== CHAR_BOM + && !isWhitespace(c) // - s-white + // - (c-indicator ::= + // “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}” + && c !== CHAR_MINUS + && c !== CHAR_QUESTION + && c !== CHAR_COLON + && c !== CHAR_COMMA + && c !== CHAR_LEFT_SQUARE_BRACKET + && c !== CHAR_RIGHT_SQUARE_BRACKET + && c !== CHAR_LEFT_CURLY_BRACKET + && c !== CHAR_RIGHT_CURLY_BRACKET + // | “#” | “&” | “*” | “!” | “|” | “=” | “>” | “'” | “"” + && c !== CHAR_SHARP + && c !== CHAR_AMPERSAND + && c !== CHAR_ASTERISK + && c !== CHAR_EXCLAMATION + && c !== CHAR_VERTICAL_LINE + && c !== CHAR_EQUALS + && c !== CHAR_GREATER_THAN + && c !== CHAR_SINGLE_QUOTE + && c !== CHAR_DOUBLE_QUOTE + // | “%” | “@” | “`”) + && c !== CHAR_PERCENT + && c !== CHAR_COMMERCIAL_AT + && c !== CHAR_GRAVE_ACCENT; +} + +// Simplified test for values allowed as the last character in plain style. +function isPlainSafeLast(c) { + // just not whitespace or colon, it will be checked to be plain character later + return !isWhitespace(c) && c !== CHAR_COLON; +} + +// Same as 'string'.codePointAt(pos), but works in older browsers. +function codePointAt(string, pos) { + var first = string.charCodeAt(pos), second; + if (first >= 0xD800 && first <= 0xDBFF && pos + 1 < string.length) { + second = string.charCodeAt(pos + 1); + if (second >= 0xDC00 && second <= 0xDFFF) { + // https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae + return (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000; + } + } + return first; +} + +// Determines whether block indentation indicator is required. +function needIndentIndicator(string) { + var leadingSpaceRe = /^\n* /; + return leadingSpaceRe.test(string); +} + +var STYLE_PLAIN = 1, + STYLE_SINGLE = 2, + STYLE_LITERAL = 3, + STYLE_FOLDED = 4, + STYLE_DOUBLE = 5; + +// Determines which scalar styles are possible and returns the preferred style. +// lineWidth = -1 => no limit. +// Pre-conditions: str.length > 0. +// Post-conditions: +// STYLE_PLAIN or STYLE_SINGLE => no \n are in the string. +// STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1). +// STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1). +function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, + testAmbiguousType, quotingType, forceQuotes, inblock) { + + var i; + var char = 0; + var prevChar = null; + var hasLineBreak = false; + var hasFoldableLine = false; // only checked if shouldTrackWidth + var shouldTrackWidth = lineWidth !== -1; + var previousLineBreak = -1; // count the first line correctly + var plain = isPlainSafeFirst(codePointAt(string, 0)) + && isPlainSafeLast(codePointAt(string, string.length - 1)); + + if (singleLineOnly || forceQuotes) { + // Case: no block styles. + // Check for disallowed characters to rule out plain and single. + for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { + char = codePointAt(string, i); + if (!isPrintable(char)) { + return STYLE_DOUBLE; + } + plain = plain && isPlainSafe(char, prevChar, inblock); + prevChar = char; + } + } else { + // Case: block styles permitted. + for (i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { + char = codePointAt(string, i); + if (char === CHAR_LINE_FEED) { + hasLineBreak = true; + // Check if any line can be folded. 
+ if (shouldTrackWidth) { + hasFoldableLine = hasFoldableLine || + // Foldable line = too long, and not more-indented. + (i - previousLineBreak - 1 > lineWidth && + string[previousLineBreak + 1] !== ' '); + previousLineBreak = i; + } + } else if (!isPrintable(char)) { + return STYLE_DOUBLE; + } + plain = plain && isPlainSafe(char, prevChar, inblock); + prevChar = char; + } + // in case the end is missing a \n + hasFoldableLine = hasFoldableLine || (shouldTrackWidth && + (i - previousLineBreak - 1 > lineWidth && + string[previousLineBreak + 1] !== ' ')); + } + // Although every style can represent \n without escaping, prefer block styles + // for multiline, since they're more readable and they don't add empty lines. + // Also prefer folding a super-long line. + if (!hasLineBreak && !hasFoldableLine) { + // Strings interpretable as another type have to be quoted; + // e.g. the string 'true' vs. the boolean true. + if (plain && !forceQuotes && !testAmbiguousType(string)) { + return STYLE_PLAIN; + } + return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; + } + // Edge case: block indentation indicator can only have one digit. + if (indentPerLevel > 9 && needIndentIndicator(string)) { + return STYLE_DOUBLE; + } + // At this point we know block styles are valid. + // Prefer literal style unless we want to fold. + if (!forceQuotes) { + return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL; + } + return quotingType === QUOTING_TYPE_DOUBLE ? STYLE_DOUBLE : STYLE_SINGLE; +} + +// Note: line breaking/folding is implemented for only the folded style. +// NB. We drop the last trailing newline (if any) of a returned block scalar +// since the dumper adds its own newline. This always works: +// • No ending newline => unaffected; already using strip "-" chomping. +// • Ending newline => removed then restored. +// Importantly, this keeps the "+" chomp indicator from gaining an extra line. +function writeScalar(state, string, level, iskey, inblock) { + state.dump = (function () { + if (string.length === 0) { + return state.quotingType === QUOTING_TYPE_DOUBLE ? '""' : "''"; + } + if (!state.noCompatMode) { + if (DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1 || DEPRECATED_BASE60_SYNTAX.test(string)) { + return state.quotingType === QUOTING_TYPE_DOUBLE ? ('"' + string + '"') : ("'" + string + "'"); + } + } + + var indent = state.indent * Math.max(1, level); // no 0-indent scalars + // As indentation gets deeper, let the width decrease monotonically + // to the lower bound min(state.lineWidth, 40). + // Note that this implies + // state.lineWidth ≤ 40 + state.indent: width is fixed at the lower bound. + // state.lineWidth > 40 + state.indent: width decreases until the lower bound. + // This behaves better than a constant minimum width which disallows narrower options, + // or an indent threshold which causes the width to suddenly increase. + var lineWidth = state.lineWidth === -1 + ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent); + + // Without knowing if keys are implicit/explicit, assume implicit for safety. + var singleLineOnly = iskey + // No block styles in flow mode. 
+ || (state.flowLevel > -1 && level >= state.flowLevel); + function testAmbiguity(string) { + return testImplicitResolving(state, string); + } + + switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth, + testAmbiguity, state.quotingType, state.forceQuotes && !iskey, inblock)) { + + case STYLE_PLAIN: + return string; + case STYLE_SINGLE: + return "'" + string.replace(/'/g, "''") + "'"; + case STYLE_LITERAL: + return '|' + blockHeader(string, state.indent) + + dropEndingNewline(indentString(string, indent)); + case STYLE_FOLDED: + return '>' + blockHeader(string, state.indent) + + dropEndingNewline(indentString(foldString(string, lineWidth), indent)); + case STYLE_DOUBLE: + return '"' + escapeString(string, lineWidth) + '"'; + default: + throw new YAMLException('impossible error: invalid scalar style'); + } + }()); +} + +// Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9. +function blockHeader(string, indentPerLevel) { + var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : ''; + + // note the special case: the string '\n' counts as a "trailing" empty line. + var clip = string[string.length - 1] === '\n'; + var keep = clip && (string[string.length - 2] === '\n' || string === '\n'); + var chomp = keep ? '+' : (clip ? '' : '-'); + + return indentIndicator + chomp + '\n'; +} + +// (See the note for writeScalar.) +function dropEndingNewline(string) { + return string[string.length - 1] === '\n' ? string.slice(0, -1) : string; +} + +// Note: a long line without a suitable break point will exceed the width limit. +// Pre-conditions: every char in str isPrintable, str.length > 0, width > 0. +function foldString(string, width) { + // In folded style, $k$ consecutive newlines output as $k+1$ newlines— + // unless they're before or after a more-indented line, or at the very + // beginning or end, in which case $k$ maps to $k$. + // Therefore, parse each chunk as newline(s) followed by a content line. + var lineRe = /(\n+)([^\n]*)/g; + + // first line (possibly an empty line) + var result = (function () { + var nextLF = string.indexOf('\n'); + nextLF = nextLF !== -1 ? nextLF : string.length; + lineRe.lastIndex = nextLF; + return foldLine(string.slice(0, nextLF), width); + }()); + // If we haven't reached the first content line yet, don't add an extra \n. + var prevMoreIndented = string[0] === '\n' || string[0] === ' '; + var moreIndented; + + // rest of the lines + var match; + while ((match = lineRe.exec(string))) { + var prefix = match[1], line = match[2]; + moreIndented = (line[0] === ' '); + result += prefix + + (!prevMoreIndented && !moreIndented && line !== '' + ? '\n' : '') + + foldLine(line, width); + prevMoreIndented = moreIndented; + } + + return result; +} + +// Greedy line breaking. +// Picks the longest line under the limit each time, +// otherwise settles for the shortest line over the limit. +// NB. More-indented lines *cannot* be folded, as that would add an extra \n. +function foldLine(line, width) { + if (line === '' || line[0] === ' ') return line; + + // Since a more-indented line adds a \n, breaks can't be followed by a space. + var breakRe = / [^ ]/g; // note: the match index will always be <= length-2. + var match; + // start is an inclusive index. end, curr, and next are exclusive. + var start = 0, end, curr = 0, next = 0; + var result = ''; + + // Invariants: 0 <= start <= length-1. + // 0 <= curr <= next <= max(0, length-2). curr - start <= width. 
+ // Inside the loop: + // A match implies length >= 2, so curr and next are <= length-2. + while ((match = breakRe.exec(line))) { + next = match.index; + // maintain invariant: curr - start <= width + if (next - start > width) { + end = (curr > start) ? curr : next; // derive end <= length-2 + result += '\n' + line.slice(start, end); + // skip the space that was output as \n + start = end + 1; // derive start <= length-1 + } + curr = next; + } + + // By the invariants, start <= length-1, so there is something left over. + // It is either the whole string or a part starting from non-whitespace. + result += '\n'; + // Insert a break if the remainder is too long and there is a break available. + if (line.length - start > width && curr > start) { + result += line.slice(start, curr) + '\n' + line.slice(curr + 1); + } else { + result += line.slice(start); + } + + return result.slice(1); // drop extra \n joiner +} + +// Escapes a double-quoted string. +function escapeString(string) { + var result = ''; + var char = 0; + var escapeSeq; + + for (var i = 0; i < string.length; char >= 0x10000 ? i += 2 : i++) { + char = codePointAt(string, i); + escapeSeq = ESCAPE_SEQUENCES[char]; + + if (!escapeSeq && isPrintable(char)) { + result += string[i]; + if (char >= 0x10000) result += string[i + 1]; + } else { + result += escapeSeq || encodeHex(char); + } + } + + return result; +} + +function writeFlowSequence(state, level, object) { + var _result = '', + _tag = state.tag, + index, + length, + value; + + for (index = 0, length = object.length; index < length; index += 1) { + value = object[index]; + + if (state.replacer) { + value = state.replacer.call(object, String(index), value); + } + + // Write only valid elements, put null instead of invalid elements. + if (writeNode(state, level, value, false, false) || + (typeof value === 'undefined' && + writeNode(state, level, null, false, false))) { + + if (_result !== '') _result += ',' + (!state.condenseFlow ? ' ' : ''); + _result += state.dump; + } + } + + state.tag = _tag; + state.dump = '[' + _result + ']'; +} + +function writeBlockSequence(state, level, object, compact) { + var _result = '', + _tag = state.tag, + index, + length, + value; + + for (index = 0, length = object.length; index < length; index += 1) { + value = object[index]; + + if (state.replacer) { + value = state.replacer.call(object, String(index), value); + } + + // Write only valid elements, put null instead of invalid elements. + if (writeNode(state, level + 1, value, true, true, false, true) || + (typeof value === 'undefined' && + writeNode(state, level + 1, null, true, true, false, true))) { + + if (!compact || _result !== '') { + _result += generateNextLine(state, level); + } + + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + _result += '-'; + } else { + _result += '- '; + } + + _result += state.dump; + } + } + + state.tag = _tag; + state.dump = _result || '[]'; // Empty sequence if no valid values. 
+} + +function writeFlowMapping(state, level, object) { + var _result = '', + _tag = state.tag, + objectKeyList = Object.keys(object), + index, + length, + objectKey, + objectValue, + pairBuffer; + + for (index = 0, length = objectKeyList.length; index < length; index += 1) { + + pairBuffer = ''; + if (_result !== '') pairBuffer += ', '; + + if (state.condenseFlow) pairBuffer += '"'; + + objectKey = objectKeyList[index]; + objectValue = object[objectKey]; + + if (state.replacer) { + objectValue = state.replacer.call(object, objectKey, objectValue); + } + + if (!writeNode(state, level, objectKey, false, false)) { + continue; // Skip this pair because of invalid key; + } + + if (state.dump.length > 1024) pairBuffer += '? '; + + pairBuffer += state.dump + (state.condenseFlow ? '"' : '') + ':' + (state.condenseFlow ? '' : ' '); + + if (!writeNode(state, level, objectValue, false, false)) { + continue; // Skip this pair because of invalid value. + } + + pairBuffer += state.dump; + + // Both key and value are valid. + _result += pairBuffer; + } + + state.tag = _tag; + state.dump = '{' + _result + '}'; +} + +function writeBlockMapping(state, level, object, compact) { + var _result = '', + _tag = state.tag, + objectKeyList = Object.keys(object), + index, + length, + objectKey, + objectValue, + explicitPair, + pairBuffer; + + // Allow sorting keys so that the output file is deterministic + if (state.sortKeys === true) { + // Default sorting + objectKeyList.sort(); + } else if (typeof state.sortKeys === 'function') { + // Custom sort function + objectKeyList.sort(state.sortKeys); + } else if (state.sortKeys) { + // Something is wrong + throw new YAMLException('sortKeys must be a boolean or a function'); + } + + for (index = 0, length = objectKeyList.length; index < length; index += 1) { + pairBuffer = ''; + + if (!compact || _result !== '') { + pairBuffer += generateNextLine(state, level); + } + + objectKey = objectKeyList[index]; + objectValue = object[objectKey]; + + if (state.replacer) { + objectValue = state.replacer.call(object, objectKey, objectValue); + } + + if (!writeNode(state, level + 1, objectKey, true, true, true)) { + continue; // Skip this pair because of invalid key. + } + + explicitPair = (state.tag !== null && state.tag !== '?') || + (state.dump && state.dump.length > 1024); + + if (explicitPair) { + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + pairBuffer += '?'; + } else { + pairBuffer += '? '; + } + } + + pairBuffer += state.dump; + + if (explicitPair) { + pairBuffer += generateNextLine(state, level); + } + + if (!writeNode(state, level + 1, objectValue, true, explicitPair)) { + continue; // Skip this pair because of invalid value. + } + + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + pairBuffer += ':'; + } else { + pairBuffer += ': '; + } + + pairBuffer += state.dump; + + // Both key and value are valid. + _result += pairBuffer; + } + + state.tag = _tag; + state.dump = _result || '{}'; // Empty mapping if no valid pairs. +} + +function detectType(state, object, explicit) { + var _result, typeList, index, length, type, style; + + typeList = explicit ? 
state.explicitTypes : state.implicitTypes; + + for (index = 0, length = typeList.length; index < length; index += 1) { + type = typeList[index]; + + if ((type.instanceOf || type.predicate) && + (!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) && + (!type.predicate || type.predicate(object))) { + + if (explicit) { + if (type.multi && type.representName) { + state.tag = type.representName(object); + } else { + state.tag = type.tag; + } + } else { + state.tag = '?'; + } + + if (type.represent) { + style = state.styleMap[type.tag] || type.defaultStyle; + + if (_toString.call(type.represent) === '[object Function]') { + _result = type.represent(object, style); + } else if (_hasOwnProperty.call(type.represent, style)) { + _result = type.represent[style](object, style); + } else { + throw new YAMLException('!<' + type.tag + '> tag resolver accepts not "' + style + '" style'); + } + + state.dump = _result; + } + + return true; + } + } + + return false; +} + +// Serializes `object` and writes it to global `result`. +// Returns true on success, or false on invalid object. +// +function writeNode(state, level, object, block, compact, iskey, isblockseq) { + state.tag = null; + state.dump = object; + + if (!detectType(state, object, false)) { + detectType(state, object, true); + } + + var type = _toString.call(state.dump); + var inblock = block; + var tagStr; + + if (block) { + block = (state.flowLevel < 0 || state.flowLevel > level); + } + + var objectOrArray = type === '[object Object]' || type === '[object Array]', + duplicateIndex, + duplicate; + + if (objectOrArray) { + duplicateIndex = state.duplicates.indexOf(object); + duplicate = duplicateIndex !== -1; + } + + if ((state.tag !== null && state.tag !== '?') || duplicate || (state.indent !== 2 && level > 0)) { + compact = false; + } + + if (duplicate && state.usedDuplicates[duplicateIndex]) { + state.dump = '*ref_' + duplicateIndex; + } else { + if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) { + state.usedDuplicates[duplicateIndex] = true; + } + if (type === '[object Object]') { + if (block && (Object.keys(state.dump).length !== 0)) { + writeBlockMapping(state, level, state.dump, compact); + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + state.dump; + } + } else { + writeFlowMapping(state, level, state.dump); + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; + } + } + } else if (type === '[object Array]') { + if (block && (state.dump.length !== 0)) { + if (state.noArrayIndent && !isblockseq && level > 0) { + writeBlockSequence(state, level - 1, state.dump, compact); + } else { + writeBlockSequence(state, level, state.dump, compact); + } + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + state.dump; + } + } else { + writeFlowSequence(state, level, state.dump); + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; + } + } + } else if (type === '[object String]') { + if (state.tag !== '?') { + writeScalar(state, state.dump, level, iskey, inblock); + } + } else if (type === '[object Undefined]') { + return false; + } else { + if (state.skipInvalid) return false; + throw new YAMLException('unacceptable kind of an object to dump ' + type); + } + + if (state.tag !== null && state.tag !== '?') { + // Need to encode all characters except those allowed by the spec: + // + // [35] ns-dec-digit ::= [#x30-#x39] /* 0-9 */ + // [36] ns-hex-digit ::= ns-dec-digit + // | [#x41-#x46] /* A-F */ | [#x61-#x66] /* a-f */ + // 
[37] ns-ascii-letter ::= [#x41-#x5A] /* A-Z */ | [#x61-#x7A] /* a-z */ + // [38] ns-word-char ::= ns-dec-digit | ns-ascii-letter | “-” + // [39] ns-uri-char ::= “%” ns-hex-digit ns-hex-digit | ns-word-char | “#” + // | “;” | “/” | “?” | “:” | “@” | “&” | “=” | “+” | “$” | “,” + // | “_” | “.” | “!” | “~” | “*” | “'” | “(” | “)” | “[” | “]” + // + // Also need to encode '!' because it has special meaning (end of tag prefix). + // + tagStr = encodeURI( + state.tag[0] === '!' ? state.tag.slice(1) : state.tag + ).replace(/!/g, '%21'); + + if (state.tag[0] === '!') { + tagStr = '!' + tagStr; + } else if (tagStr.slice(0, 18) === 'tag:yaml.org,2002:') { + tagStr = '!!' + tagStr.slice(18); + } else { + tagStr = '!<' + tagStr + '>'; + } + + state.dump = tagStr + ' ' + state.dump; + } + } + + return true; +} + +function getDuplicateReferences(object, state) { + var objects = [], + duplicatesIndexes = [], + index, + length; + + inspectNode(object, objects, duplicatesIndexes); + + for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) { + state.duplicates.push(objects[duplicatesIndexes[index]]); + } + state.usedDuplicates = new Array(length); +} + +function inspectNode(object, objects, duplicatesIndexes) { + var objectKeyList, + index, + length; + + if (object !== null && typeof object === 'object') { + index = objects.indexOf(object); + if (index !== -1) { + if (duplicatesIndexes.indexOf(index) === -1) { + duplicatesIndexes.push(index); + } + } else { + objects.push(object); + + if (Array.isArray(object)) { + for (index = 0, length = object.length; index < length; index += 1) { + inspectNode(object[index], objects, duplicatesIndexes); + } + } else { + objectKeyList = Object.keys(object); + + for (index = 0, length = objectKeyList.length; index < length; index += 1) { + inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes); + } + } + } + } +} + +function dump(input, options) { + options = options || {}; + + var state = new State(options); + + if (!state.noRefs) getDuplicateReferences(input, state); + + var value = input; + + if (state.replacer) { + value = state.replacer.call({ '': value }, '', value); + } + + if (writeNode(state, 0, value, true, true)) return state.dump + '\n'; + + return ''; +} + +module.exports.dump = dump; + + +/***/ }), + +/***/ 1248: +/***/ ((module) => { + +"use strict"; +// YAML error class. http://stackoverflow.com/questions/8458984 +// + + + +function formatError(exception, compact) { + var where = '', message = exception.reason || '(unknown reason)'; + + if (!exception.mark) return message; + + if (exception.mark.name) { + where += 'in "' + exception.mark.name + '" '; + } + + where += '(' + (exception.mark.line + 1) + ':' + (exception.mark.column + 1) + ')'; + + if (!compact && exception.mark.snippet) { + where += '\n\n' + exception.mark.snippet; + } + + return message + ' ' + where; +} + + +function YAMLException(reason, mark) { + // Super constructor + Error.call(this); + + this.name = 'YAMLException'; + this.reason = reason; + this.mark = mark; + this.message = formatError(this, false); + + // Include stack trace in error object + if (Error.captureStackTrace) { + // Chrome and NodeJS + Error.captureStackTrace(this, this.constructor); + } else { + // FF, IE 10+ and Safari 6+. 
Fallback for others + this.stack = (new Error()).stack || ''; + } +} + + +// Inherit from Error +YAMLException.prototype = Object.create(Error.prototype); +YAMLException.prototype.constructor = YAMLException; + + +YAMLException.prototype.toString = function toString(compact) { + return this.name + ': ' + formatError(this, compact); +}; + + +module.exports = YAMLException; + + +/***/ }), + +/***/ 1950: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/*eslint-disable max-len,no-use-before-define*/ + +var common = __nccwpck_require__(9816); +var YAMLException = __nccwpck_require__(1248); +var makeSnippet = __nccwpck_require__(9440); +var DEFAULT_SCHEMA = __nccwpck_require__(7336); + + +var _hasOwnProperty = Object.prototype.hasOwnProperty; + + +var CONTEXT_FLOW_IN = 1; +var CONTEXT_FLOW_OUT = 2; +var CONTEXT_BLOCK_IN = 3; +var CONTEXT_BLOCK_OUT = 4; + + +var CHOMPING_CLIP = 1; +var CHOMPING_STRIP = 2; +var CHOMPING_KEEP = 3; + + +var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; +var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/; +var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/; +var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i; +var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i; + + +function _class(obj) { return Object.prototype.toString.call(obj); } + +function is_EOL(c) { + return (c === 0x0A/* LF */) || (c === 0x0D/* CR */); +} + +function is_WHITE_SPACE(c) { + return (c === 0x09/* Tab */) || (c === 0x20/* Space */); +} + +function is_WS_OR_EOL(c) { + return (c === 0x09/* Tab */) || + (c === 0x20/* Space */) || + (c === 0x0A/* LF */) || + (c === 0x0D/* CR */); +} + +function is_FLOW_INDICATOR(c) { + return c === 0x2C/* , */ || + c === 0x5B/* [ */ || + c === 0x5D/* ] */ || + c === 0x7B/* { */ || + c === 0x7D/* } */; +} + +function fromHexCode(c) { + var lc; + + if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { + return c - 0x30; + } + + /*eslint-disable no-bitwise*/ + lc = c | 0x20; + + if ((0x61/* a */ <= lc) && (lc <= 0x66/* f */)) { + return lc - 0x61 + 10; + } + + return -1; +} + +function escapedHexLen(c) { + if (c === 0x78/* x */) { return 2; } + if (c === 0x75/* u */) { return 4; } + if (c === 0x55/* U */) { return 8; } + return 0; +} + +function fromDecimalCode(c) { + if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { + return c - 0x30; + } + + return -1; +} + +function simpleEscapeSequence(c) { + /* eslint-disable indent */ + return (c === 0x30/* 0 */) ? '\x00' : + (c === 0x61/* a */) ? '\x07' : + (c === 0x62/* b */) ? '\x08' : + (c === 0x74/* t */) ? '\x09' : + (c === 0x09/* Tab */) ? '\x09' : + (c === 0x6E/* n */) ? '\x0A' : + (c === 0x76/* v */) ? '\x0B' : + (c === 0x66/* f */) ? '\x0C' : + (c === 0x72/* r */) ? '\x0D' : + (c === 0x65/* e */) ? '\x1B' : + (c === 0x20/* Space */) ? ' ' : + (c === 0x22/* " */) ? '\x22' : + (c === 0x2F/* / */) ? '/' : + (c === 0x5C/* \ */) ? '\x5C' : + (c === 0x4E/* N */) ? '\x85' : + (c === 0x5F/* _ */) ? '\xA0' : + (c === 0x4C/* L */) ? '\u2028' : + (c === 0x50/* P */) ? 
'\u2029' : ''; +} + +function charFromCodepoint(c) { + if (c <= 0xFFFF) { + return String.fromCharCode(c); + } + // Encode UTF-16 surrogate pair + // https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF + return String.fromCharCode( + ((c - 0x010000) >> 10) + 0xD800, + ((c - 0x010000) & 0x03FF) + 0xDC00 + ); +} + +var simpleEscapeCheck = new Array(256); // integer, for fast access +var simpleEscapeMap = new Array(256); +for (var i = 0; i < 256; i++) { + simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 1 : 0; + simpleEscapeMap[i] = simpleEscapeSequence(i); +} + + +function State(input, options) { + this.input = input; + + this.filename = options['filename'] || null; + this.schema = options['schema'] || DEFAULT_SCHEMA; + this.onWarning = options['onWarning'] || null; + // (Hidden) Remove? makes the loader to expect YAML 1.1 documents + // if such documents have no explicit %YAML directive + this.legacy = options['legacy'] || false; + + this.json = options['json'] || false; + this.listener = options['listener'] || null; + + this.implicitTypes = this.schema.compiledImplicit; + this.typeMap = this.schema.compiledTypeMap; + + this.length = input.length; + this.position = 0; + this.line = 0; + this.lineStart = 0; + this.lineIndent = 0; + + // position of first leading tab in the current line, + // used to make sure there are no tabs in the indentation + this.firstTabInLine = -1; + + this.documents = []; + + /* + this.version; + this.checkLineBreaks; + this.tagMap; + this.anchorMap; + this.tag; + this.anchor; + this.kind; + this.result;*/ + +} + + +function generateError(state, message) { + var mark = { + name: state.filename, + buffer: state.input.slice(0, -1), // omit trailing \0 + position: state.position, + line: state.line, + column: state.position - state.lineStart + }; + + mark.snippet = makeSnippet(mark); + + return new YAMLException(message, mark); +} + +function throwError(state, message) { + throw generateError(state, message); +} + +function throwWarning(state, message) { + if (state.onWarning) { + state.onWarning.call(null, generateError(state, message)); + } +} + + +var directiveHandlers = { + + YAML: function handleYamlDirective(state, name, args) { + + var match, major, minor; + + if (state.version !== null) { + throwError(state, 'duplication of %YAML directive'); + } + + if (args.length !== 1) { + throwError(state, 'YAML directive accepts exactly one argument'); + } + + match = /^([0-9]+)\.([0-9]+)$/.exec(args[0]); + + if (match === null) { + throwError(state, 'ill-formed argument of the YAML directive'); + } + + major = parseInt(match[1], 10); + minor = parseInt(match[2], 10); + + if (major !== 1) { + throwError(state, 'unacceptable YAML version of the document'); + } + + state.version = args[0]; + state.checkLineBreaks = (minor < 2); + + if (minor !== 1 && minor !== 2) { + throwWarning(state, 'unsupported YAML version of the document'); + } + }, + + TAG: function handleTagDirective(state, name, args) { + + var handle, prefix; + + if (args.length !== 2) { + throwError(state, 'TAG directive accepts exactly two arguments'); + } + + handle = args[0]; + prefix = args[1]; + + if (!PATTERN_TAG_HANDLE.test(handle)) { + throwError(state, 'ill-formed tag handle (first argument) of the TAG directive'); + } + + if (_hasOwnProperty.call(state.tagMap, handle)) { + throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle'); + } + + if (!PATTERN_TAG_URI.test(prefix)) { + throwError(state, 'ill-formed tag prefix (second argument) of the TAG 
directive'); + } + + try { + prefix = decodeURIComponent(prefix); + } catch (err) { + throwError(state, 'tag prefix is malformed: ' + prefix); + } + + state.tagMap[handle] = prefix; + } +}; + + +function captureSegment(state, start, end, checkJson) { + var _position, _length, _character, _result; + + if (start < end) { + _result = state.input.slice(start, end); + + if (checkJson) { + for (_position = 0, _length = _result.length; _position < _length; _position += 1) { + _character = _result.charCodeAt(_position); + if (!(_character === 0x09 || + (0x20 <= _character && _character <= 0x10FFFF))) { + throwError(state, 'expected valid JSON character'); + } + } + } else if (PATTERN_NON_PRINTABLE.test(_result)) { + throwError(state, 'the stream contains non-printable characters'); + } + + state.result += _result; + } +} + +function mergeMappings(state, destination, source, overridableKeys) { + var sourceKeys, key, index, quantity; + + if (!common.isObject(source)) { + throwError(state, 'cannot merge mappings; the provided source object is unacceptable'); + } + + sourceKeys = Object.keys(source); + + for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) { + key = sourceKeys[index]; + + if (!_hasOwnProperty.call(destination, key)) { + destination[key] = source[key]; + overridableKeys[key] = true; + } + } +} + +function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, + startLine, startLineStart, startPos) { + + var index, quantity; + + // The output is a plain object here, so keys can only be strings. + // We need to convert keyNode to a string, but doing so can hang the process + // (deeply nested arrays that explode exponentially using aliases). + if (Array.isArray(keyNode)) { + keyNode = Array.prototype.slice.call(keyNode); + + for (index = 0, quantity = keyNode.length; index < quantity; index += 1) { + if (Array.isArray(keyNode[index])) { + throwError(state, 'nested arrays are not supported inside keys'); + } + + if (typeof keyNode === 'object' && _class(keyNode[index]) === '[object Object]') { + keyNode[index] = '[object Object]'; + } + } + } + + // Avoid code execution in load() via toString property + // (still use its own toString for arrays, timestamps, + // and whatever user schema extensions happen to have @@toStringTag) + if (typeof keyNode === 'object' && _class(keyNode) === '[object Object]') { + keyNode = '[object Object]'; + } + + + keyNode = String(keyNode); + + if (_result === null) { + _result = {}; + } + + if (keyTag === 'tag:yaml.org,2002:merge') { + if (Array.isArray(valueNode)) { + for (index = 0, quantity = valueNode.length; index < quantity; index += 1) { + mergeMappings(state, _result, valueNode[index], overridableKeys); + } + } else { + mergeMappings(state, _result, valueNode, overridableKeys); + } + } else { + if (!state.json && + !_hasOwnProperty.call(overridableKeys, keyNode) && + _hasOwnProperty.call(_result, keyNode)) { + state.line = startLine || state.line; + state.lineStart = startLineStart || state.lineStart; + state.position = startPos || state.position; + throwError(state, 'duplicated mapping key'); + } + + // used for this specific key only because Object.defineProperty is slow + if (keyNode === '__proto__') { + Object.defineProperty(_result, keyNode, { + configurable: true, + enumerable: true, + writable: true, + value: valueNode + }); + } else { + _result[keyNode] = valueNode; + } + delete overridableKeys[keyNode]; + } + + return _result; +} + +function readLineBreak(state) { + var ch; + + ch = 
state.input.charCodeAt(state.position); + + if (ch === 0x0A/* LF */) { + state.position++; + } else if (ch === 0x0D/* CR */) { + state.position++; + if (state.input.charCodeAt(state.position) === 0x0A/* LF */) { + state.position++; + } + } else { + throwError(state, 'a line break is expected'); + } + + state.line += 1; + state.lineStart = state.position; + state.firstTabInLine = -1; +} + +function skipSeparationSpace(state, allowComments, checkIndent) { + var lineBreaks = 0, + ch = state.input.charCodeAt(state.position); + + while (ch !== 0) { + while (is_WHITE_SPACE(ch)) { + if (ch === 0x09/* Tab */ && state.firstTabInLine === -1) { + state.firstTabInLine = state.position; + } + ch = state.input.charCodeAt(++state.position); + } + + if (allowComments && ch === 0x23/* # */) { + do { + ch = state.input.charCodeAt(++state.position); + } while (ch !== 0x0A/* LF */ && ch !== 0x0D/* CR */ && ch !== 0); + } + + if (is_EOL(ch)) { + readLineBreak(state); + + ch = state.input.charCodeAt(state.position); + lineBreaks++; + state.lineIndent = 0; + + while (ch === 0x20/* Space */) { + state.lineIndent++; + ch = state.input.charCodeAt(++state.position); + } + } else { + break; + } + } + + if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) { + throwWarning(state, 'deficient indentation'); + } + + return lineBreaks; +} + +function testDocumentSeparator(state) { + var _position = state.position, + ch; + + ch = state.input.charCodeAt(_position); + + // Condition state.position === state.lineStart is tested + // in parent on each call, for efficiency. No needs to test here again. + if ((ch === 0x2D/* - */ || ch === 0x2E/* . */) && + ch === state.input.charCodeAt(_position + 1) && + ch === state.input.charCodeAt(_position + 2)) { + + _position += 3; + + ch = state.input.charCodeAt(_position); + + if (ch === 0 || is_WS_OR_EOL(ch)) { + return true; + } + } + + return false; +} + +function writeFoldedLines(state, count) { + if (count === 1) { + state.result += ' '; + } else if (count > 1) { + state.result += common.repeat('\n', count - 1); + } +} + + +function readPlainScalar(state, nodeIndent, withinFlowCollection) { + var preceding, + following, + captureStart, + captureEnd, + hasPendingContent, + _line, + _lineStart, + _lineIndent, + _kind = state.kind, + _result = state.result, + ch; + + ch = state.input.charCodeAt(state.position); + + if (is_WS_OR_EOL(ch) || + is_FLOW_INDICATOR(ch) || + ch === 0x23/* # */ || + ch === 0x26/* & */ || + ch === 0x2A/* * */ || + ch === 0x21/* ! */ || + ch === 0x7C/* | */ || + ch === 0x3E/* > */ || + ch === 0x27/* ' */ || + ch === 0x22/* " */ || + ch === 0x25/* % */ || + ch === 0x40/* @ */ || + ch === 0x60/* ` */) { + return false; + } + + if (ch === 0x3F/* ? 
*/ || ch === 0x2D/* - */) { + following = state.input.charCodeAt(state.position + 1); + + if (is_WS_OR_EOL(following) || + withinFlowCollection && is_FLOW_INDICATOR(following)) { + return false; + } + } + + state.kind = 'scalar'; + state.result = ''; + captureStart = captureEnd = state.position; + hasPendingContent = false; + + while (ch !== 0) { + if (ch === 0x3A/* : */) { + following = state.input.charCodeAt(state.position + 1); + + if (is_WS_OR_EOL(following) || + withinFlowCollection && is_FLOW_INDICATOR(following)) { + break; + } + + } else if (ch === 0x23/* # */) { + preceding = state.input.charCodeAt(state.position - 1); + + if (is_WS_OR_EOL(preceding)) { + break; + } + + } else if ((state.position === state.lineStart && testDocumentSeparator(state)) || + withinFlowCollection && is_FLOW_INDICATOR(ch)) { + break; + + } else if (is_EOL(ch)) { + _line = state.line; + _lineStart = state.lineStart; + _lineIndent = state.lineIndent; + skipSeparationSpace(state, false, -1); + + if (state.lineIndent >= nodeIndent) { + hasPendingContent = true; + ch = state.input.charCodeAt(state.position); + continue; + } else { + state.position = captureEnd; + state.line = _line; + state.lineStart = _lineStart; + state.lineIndent = _lineIndent; + break; + } + } + + if (hasPendingContent) { + captureSegment(state, captureStart, captureEnd, false); + writeFoldedLines(state, state.line - _line); + captureStart = captureEnd = state.position; + hasPendingContent = false; + } + + if (!is_WHITE_SPACE(ch)) { + captureEnd = state.position + 1; + } + + ch = state.input.charCodeAt(++state.position); + } + + captureSegment(state, captureStart, captureEnd, false); + + if (state.result) { + return true; + } + + state.kind = _kind; + state.result = _result; + return false; +} + +function readSingleQuotedScalar(state, nodeIndent) { + var ch, + captureStart, captureEnd; + + ch = state.input.charCodeAt(state.position); + + if (ch !== 0x27/* ' */) { + return false; + } + + state.kind = 'scalar'; + state.result = ''; + state.position++; + captureStart = captureEnd = state.position; + + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + if (ch === 0x27/* ' */) { + captureSegment(state, captureStart, state.position, true); + ch = state.input.charCodeAt(++state.position); + + if (ch === 0x27/* ' */) { + captureStart = state.position; + state.position++; + captureEnd = state.position; + } else { + return true; + } + + } else if (is_EOL(ch)) { + captureSegment(state, captureStart, captureEnd, true); + writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); + captureStart = captureEnd = state.position; + + } else if (state.position === state.lineStart && testDocumentSeparator(state)) { + throwError(state, 'unexpected end of the document within a single quoted scalar'); + + } else { + state.position++; + captureEnd = state.position; + } + } + + throwError(state, 'unexpected end of the stream within a single quoted scalar'); +} + +function readDoubleQuotedScalar(state, nodeIndent) { + var captureStart, + captureEnd, + hexLength, + hexResult, + tmp, + ch; + + ch = state.input.charCodeAt(state.position); + + if (ch !== 0x22/* " */) { + return false; + } + + state.kind = 'scalar'; + state.result = ''; + state.position++; + captureStart = captureEnd = state.position; + + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + if (ch === 0x22/* " */) { + captureSegment(state, captureStart, state.position, true); + state.position++; + return true; + + } else if (ch === 0x5C/* \ */) { + 
captureSegment(state, captureStart, state.position, true); + ch = state.input.charCodeAt(++state.position); + + if (is_EOL(ch)) { + skipSeparationSpace(state, false, nodeIndent); + + // TODO: rework to inline fn with no type cast? + } else if (ch < 256 && simpleEscapeCheck[ch]) { + state.result += simpleEscapeMap[ch]; + state.position++; + + } else if ((tmp = escapedHexLen(ch)) > 0) { + hexLength = tmp; + hexResult = 0; + + for (; hexLength > 0; hexLength--) { + ch = state.input.charCodeAt(++state.position); + + if ((tmp = fromHexCode(ch)) >= 0) { + hexResult = (hexResult << 4) + tmp; + + } else { + throwError(state, 'expected hexadecimal character'); + } + } + + state.result += charFromCodepoint(hexResult); + + state.position++; + + } else { + throwError(state, 'unknown escape sequence'); + } + + captureStart = captureEnd = state.position; + + } else if (is_EOL(ch)) { + captureSegment(state, captureStart, captureEnd, true); + writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); + captureStart = captureEnd = state.position; + + } else if (state.position === state.lineStart && testDocumentSeparator(state)) { + throwError(state, 'unexpected end of the document within a double quoted scalar'); + + } else { + state.position++; + captureEnd = state.position; + } + } + + throwError(state, 'unexpected end of the stream within a double quoted scalar'); +} + +function readFlowCollection(state, nodeIndent) { + var readNext = true, + _line, + _lineStart, + _pos, + _tag = state.tag, + _result, + _anchor = state.anchor, + following, + terminator, + isPair, + isExplicitPair, + isMapping, + overridableKeys = Object.create(null), + keyNode, + keyTag, + valueNode, + ch; + + ch = state.input.charCodeAt(state.position); + + if (ch === 0x5B/* [ */) { + terminator = 0x5D;/* ] */ + isMapping = false; + _result = []; + } else if (ch === 0x7B/* { */) { + terminator = 0x7D;/* } */ + isMapping = true; + _result = {}; + } else { + return false; + } + + if (state.anchor !== null) { + state.anchorMap[state.anchor] = _result; + } + + ch = state.input.charCodeAt(++state.position); + + while (ch !== 0) { + skipSeparationSpace(state, true, nodeIndent); + + ch = state.input.charCodeAt(state.position); + + if (ch === terminator) { + state.position++; + state.tag = _tag; + state.anchor = _anchor; + state.kind = isMapping ? 'mapping' : 'sequence'; + state.result = _result; + return true; + } else if (!readNext) { + throwError(state, 'missed comma between flow collection entries'); + } else if (ch === 0x2C/* , */) { + // "flow collection entries can never be completely empty", as per YAML 1.2, section 7.4 + throwError(state, "expected the node content, but found ','"); + } + + keyTag = keyNode = valueNode = null; + isPair = isExplicitPair = false; + + if (ch === 0x3F/* ? */) { + following = state.input.charCodeAt(state.position + 1); + + if (is_WS_OR_EOL(following)) { + isPair = isExplicitPair = true; + state.position++; + skipSeparationSpace(state, true, nodeIndent); + } + } + + _line = state.line; // Save the current line. 
+ _lineStart = state.lineStart; + _pos = state.position; + composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); + keyTag = state.tag; + keyNode = state.result; + skipSeparationSpace(state, true, nodeIndent); + + ch = state.input.charCodeAt(state.position); + + if ((isExplicitPair || state.line === _line) && ch === 0x3A/* : */) { + isPair = true; + ch = state.input.charCodeAt(++state.position); + skipSeparationSpace(state, true, nodeIndent); + composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); + valueNode = state.result; + } + + if (isMapping) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos); + } else if (isPair) { + _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode, _line, _lineStart, _pos)); + } else { + _result.push(keyNode); + } + + skipSeparationSpace(state, true, nodeIndent); + + ch = state.input.charCodeAt(state.position); + + if (ch === 0x2C/* , */) { + readNext = true; + ch = state.input.charCodeAt(++state.position); + } else { + readNext = false; + } + } + + throwError(state, 'unexpected end of the stream within a flow collection'); +} + +function readBlockScalar(state, nodeIndent) { + var captureStart, + folding, + chomping = CHOMPING_CLIP, + didReadContent = false, + detectedIndent = false, + textIndent = nodeIndent, + emptyLines = 0, + atMoreIndented = false, + tmp, + ch; + + ch = state.input.charCodeAt(state.position); + + if (ch === 0x7C/* | */) { + folding = false; + } else if (ch === 0x3E/* > */) { + folding = true; + } else { + return false; + } + + state.kind = 'scalar'; + state.result = ''; + + while (ch !== 0) { + ch = state.input.charCodeAt(++state.position); + + if (ch === 0x2B/* + */ || ch === 0x2D/* - */) { + if (CHOMPING_CLIP === chomping) { + chomping = (ch === 0x2B/* + */) ? CHOMPING_KEEP : CHOMPING_STRIP; + } else { + throwError(state, 'repeat of a chomping mode identifier'); + } + + } else if ((tmp = fromDecimalCode(ch)) >= 0) { + if (tmp === 0) { + throwError(state, 'bad explicit indentation width of a block scalar; it cannot be less than one'); + } else if (!detectedIndent) { + textIndent = nodeIndent + tmp - 1; + detectedIndent = true; + } else { + throwError(state, 'repeat of an indentation width identifier'); + } + + } else { + break; + } + } + + if (is_WHITE_SPACE(ch)) { + do { ch = state.input.charCodeAt(++state.position); } + while (is_WHITE_SPACE(ch)); + + if (ch === 0x23/* # */) { + do { ch = state.input.charCodeAt(++state.position); } + while (!is_EOL(ch) && (ch !== 0)); + } + } + + while (ch !== 0) { + readLineBreak(state); + state.lineIndent = 0; + + ch = state.input.charCodeAt(state.position); + + while ((!detectedIndent || state.lineIndent < textIndent) && + (ch === 0x20/* Space */)) { + state.lineIndent++; + ch = state.input.charCodeAt(++state.position); + } + + if (!detectedIndent && state.lineIndent > textIndent) { + textIndent = state.lineIndent; + } + + if (is_EOL(ch)) { + emptyLines++; + continue; + } + + // End of the scalar. + if (state.lineIndent < textIndent) { + + // Perform the chomping. + if (chomping === CHOMPING_KEEP) { + state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); + } else if (chomping === CHOMPING_CLIP) { + if (didReadContent) { // i.e. only if the scalar is not empty. + state.result += '\n'; + } + } + + // Break this `while` cycle and go to the funciton's epilogue. + break; + } + + // Folded style: use fancy rules to handle line breaks. 
+ if (folding) { + + // Lines starting with white space characters (more-indented lines) are not folded. + if (is_WHITE_SPACE(ch)) { + atMoreIndented = true; + // except for the first content line (cf. Example 8.1) + state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); + + // End of more-indented block. + } else if (atMoreIndented) { + atMoreIndented = false; + state.result += common.repeat('\n', emptyLines + 1); + + // Just one line break - perceive as the same line. + } else if (emptyLines === 0) { + if (didReadContent) { // i.e. only if we have already read some scalar content. + state.result += ' '; + } + + // Several line breaks - perceive as different lines. + } else { + state.result += common.repeat('\n', emptyLines); + } + + // Literal style: just add exact number of line breaks between content lines. + } else { + // Keep all line breaks except the header line break. + state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); + } + + didReadContent = true; + detectedIndent = true; + emptyLines = 0; + captureStart = state.position; + + while (!is_EOL(ch) && (ch !== 0)) { + ch = state.input.charCodeAt(++state.position); + } + + captureSegment(state, captureStart, state.position, false); + } + + return true; +} + +function readBlockSequence(state, nodeIndent) { + var _line, + _tag = state.tag, + _anchor = state.anchor, + _result = [], + following, + detected = false, + ch; + + // there is a leading tab before this token, so it can't be a block sequence/mapping; + // it can still be flow sequence/mapping or a scalar + if (state.firstTabInLine !== -1) return false; + + if (state.anchor !== null) { + state.anchorMap[state.anchor] = _result; + } + + ch = state.input.charCodeAt(state.position); + + while (ch !== 0) { + if (state.firstTabInLine !== -1) { + state.position = state.firstTabInLine; + throwError(state, 'tab characters must not be used in indentation'); + } + + if (ch !== 0x2D/* - */) { + break; + } + + following = state.input.charCodeAt(state.position + 1); + + if (!is_WS_OR_EOL(following)) { + break; + } + + detected = true; + state.position++; + + if (skipSeparationSpace(state, true, -1)) { + if (state.lineIndent <= nodeIndent) { + _result.push(null); + ch = state.input.charCodeAt(state.position); + continue; + } + } + + _line = state.line; + composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true); + _result.push(state.result); + skipSeparationSpace(state, true, -1); + + ch = state.input.charCodeAt(state.position); + + if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { + throwError(state, 'bad indentation of a sequence entry'); + } else if (state.lineIndent < nodeIndent) { + break; + } + } + + if (detected) { + state.tag = _tag; + state.anchor = _anchor; + state.kind = 'sequence'; + state.result = _result; + return true; + } + return false; +} + +function readBlockMapping(state, nodeIndent, flowIndent) { + var following, + allowCompact, + _line, + _keyLine, + _keyLineStart, + _keyPos, + _tag = state.tag, + _anchor = state.anchor, + _result = {}, + overridableKeys = Object.create(null), + keyTag = null, + keyNode = null, + valueNode = null, + atExplicitKey = false, + detected = false, + ch; + + // there is a leading tab before this token, so it can't be a block sequence/mapping; + // it can still be flow sequence/mapping or a scalar + if (state.firstTabInLine !== -1) return false; + + if (state.anchor !== null) { + state.anchorMap[state.anchor] = _result; + } + + ch = 
state.input.charCodeAt(state.position); + + while (ch !== 0) { + if (!atExplicitKey && state.firstTabInLine !== -1) { + state.position = state.firstTabInLine; + throwError(state, 'tab characters must not be used in indentation'); + } + + following = state.input.charCodeAt(state.position + 1); + _line = state.line; // Save the current line. + + // + // Explicit notation case. There are two separate blocks: + // first for the key (denoted by "?") and second for the value (denoted by ":") + // + if ((ch === 0x3F/* ? */ || ch === 0x3A/* : */) && is_WS_OR_EOL(following)) { + + if (ch === 0x3F/* ? */) { + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); + keyTag = keyNode = valueNode = null; + } + + detected = true; + atExplicitKey = true; + allowCompact = true; + + } else if (atExplicitKey) { + // i.e. 0x3A/* : */ === character after the explicit key. + atExplicitKey = false; + allowCompact = true; + + } else { + throwError(state, 'incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line'); + } + + state.position += 1; + ch = following; + + // + // Implicit notation case. Flow-style node as the key first, then ":", and the value. + // + } else { + _keyLine = state.line; + _keyLineStart = state.lineStart; + _keyPos = state.position; + + if (!composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) { + // Neither implicit nor explicit notation. + // Reading is done. Go to the epilogue. + break; + } + + if (state.line === _line) { + ch = state.input.charCodeAt(state.position); + + while (is_WHITE_SPACE(ch)) { + ch = state.input.charCodeAt(++state.position); + } + + if (ch === 0x3A/* : */) { + ch = state.input.charCodeAt(++state.position); + + if (!is_WS_OR_EOL(ch)) { + throwError(state, 'a whitespace character is expected after the key-value separator within a block mapping'); + } + + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); + keyTag = keyNode = valueNode = null; + } + + detected = true; + atExplicitKey = false; + allowCompact = false; + keyTag = state.tag; + keyNode = state.result; + + } else if (detected) { + throwError(state, 'can not read an implicit mapping pair; a colon is missed'); + + } else { + state.tag = _tag; + state.anchor = _anchor; + return true; // Keep the result of `composeNode`. + } + + } else if (detected) { + throwError(state, 'can not read a block mapping entry; a multiline key may not be an implicit key'); + + } else { + state.tag = _tag; + state.anchor = _anchor; + return true; // Keep the result of `composeNode`. + } + } + + // + // Common reading code for both explicit and implicit notations. 
+ // + if (state.line === _line || state.lineIndent > nodeIndent) { + if (atExplicitKey) { + _keyLine = state.line; + _keyLineStart = state.lineStart; + _keyPos = state.position; + } + + if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) { + if (atExplicitKey) { + keyNode = state.result; + } else { + valueNode = state.result; + } + } + + if (!atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _keyLine, _keyLineStart, _keyPos); + keyTag = keyNode = valueNode = null; + } + + skipSeparationSpace(state, true, -1); + ch = state.input.charCodeAt(state.position); + } + + if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { + throwError(state, 'bad indentation of a mapping entry'); + } else if (state.lineIndent < nodeIndent) { + break; + } + } + + // + // Epilogue. + // + + // Special case: last mapping's node contains only the key in explicit notation. + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null, _keyLine, _keyLineStart, _keyPos); + } + + // Expose the resulting mapping. + if (detected) { + state.tag = _tag; + state.anchor = _anchor; + state.kind = 'mapping'; + state.result = _result; + } + + return detected; +} + +function readTagProperty(state) { + var _position, + isVerbatim = false, + isNamed = false, + tagHandle, + tagName, + ch; + + ch = state.input.charCodeAt(state.position); + + if (ch !== 0x21/* ! */) return false; + + if (state.tag !== null) { + throwError(state, 'duplication of a tag property'); + } + + ch = state.input.charCodeAt(++state.position); + + if (ch === 0x3C/* < */) { + isVerbatim = true; + ch = state.input.charCodeAt(++state.position); + + } else if (ch === 0x21/* ! */) { + isNamed = true; + tagHandle = '!!'; + ch = state.input.charCodeAt(++state.position); + + } else { + tagHandle = '!'; + } + + _position = state.position; + + if (isVerbatim) { + do { ch = state.input.charCodeAt(++state.position); } + while (ch !== 0 && ch !== 0x3E/* > */); + + if (state.position < state.length) { + tagName = state.input.slice(_position, state.position); + ch = state.input.charCodeAt(++state.position); + } else { + throwError(state, 'unexpected end of the stream within a verbatim tag'); + } + } else { + while (ch !== 0 && !is_WS_OR_EOL(ch)) { + + if (ch === 0x21/* ! */) { + if (!isNamed) { + tagHandle = state.input.slice(_position - 1, state.position + 1); + + if (!PATTERN_TAG_HANDLE.test(tagHandle)) { + throwError(state, 'named tag handle cannot contain such characters'); + } + + isNamed = true; + _position = state.position + 1; + } else { + throwError(state, 'tag suffix cannot contain exclamation marks'); + } + } + + ch = state.input.charCodeAt(++state.position); + } + + tagName = state.input.slice(_position, state.position); + + if (PATTERN_FLOW_INDICATORS.test(tagName)) { + throwError(state, 'tag suffix cannot contain flow indicator characters'); + } + } + + if (tagName && !PATTERN_TAG_URI.test(tagName)) { + throwError(state, 'tag name cannot contain such characters: ' + tagName); + } + + try { + tagName = decodeURIComponent(tagName); + } catch (err) { + throwError(state, 'tag name is malformed: ' + tagName); + } + + if (isVerbatim) { + state.tag = tagName; + + } else if (_hasOwnProperty.call(state.tagMap, tagHandle)) { + state.tag = state.tagMap[tagHandle] + tagName; + + } else if (tagHandle === '!') { + state.tag = '!' 
+ tagName; + + } else if (tagHandle === '!!') { + state.tag = 'tag:yaml.org,2002:' + tagName; + + } else { + throwError(state, 'undeclared tag handle "' + tagHandle + '"'); + } + + return true; +} + +function readAnchorProperty(state) { + var _position, + ch; + + ch = state.input.charCodeAt(state.position); + + if (ch !== 0x26/* & */) return false; + + if (state.anchor !== null) { + throwError(state, 'duplication of an anchor property'); + } + + ch = state.input.charCodeAt(++state.position); + _position = state.position; + + while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { + ch = state.input.charCodeAt(++state.position); + } + + if (state.position === _position) { + throwError(state, 'name of an anchor node must contain at least one character'); + } + + state.anchor = state.input.slice(_position, state.position); + return true; +} + +function readAlias(state) { + var _position, alias, + ch; + + ch = state.input.charCodeAt(state.position); + + if (ch !== 0x2A/* * */) return false; + + ch = state.input.charCodeAt(++state.position); + _position = state.position; + + while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { + ch = state.input.charCodeAt(++state.position); + } + + if (state.position === _position) { + throwError(state, 'name of an alias node must contain at least one character'); + } + + alias = state.input.slice(_position, state.position); + + if (!_hasOwnProperty.call(state.anchorMap, alias)) { + throwError(state, 'unidentified alias "' + alias + '"'); + } + + state.result = state.anchorMap[alias]; + skipSeparationSpace(state, true, -1); + return true; +} + +function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) { + var allowBlockStyles, + allowBlockScalars, + allowBlockCollections, + indentStatus = 1, // 1: this>parent, 0: this=parent, -1: this<parent + atNewLine = false, + hasContent = false, + typeIndex, + typeQuantity, + typeList, + type, + flowIndent, + blockIndent; + + if (state.listener !== null) { + state.listener('open', state); + } + + state.tag = null; + state.anchor = null; + state.kind = null; + state.result = null; + + allowBlockStyles = allowBlockScalars = allowBlockCollections = + CONTEXT_BLOCK_OUT === nodeContext || + CONTEXT_BLOCK_IN === nodeContext; + + if (allowToSeek) { + if (skipSeparationSpace(state, true, -1)) { + atNewLine = true; + + if (state.lineIndent > parentIndent) { + indentStatus = 1; + } else if (state.lineIndent === parentIndent) { + indentStatus = 0; + } else if (state.lineIndent < parentIndent) { + indentStatus = -1; + } + } + } + + if (indentStatus === 1) { + while (readTagProperty(state) || readAnchorProperty(state)) { + if (skipSeparationSpace(state, true, -1)) { + atNewLine = true; + allowBlockCollections = allowBlockStyles; + + if (state.lineIndent > parentIndent) { + indentStatus = 1; + } else if (state.lineIndent === parentIndent) { + indentStatus = 0; + } else if (state.lineIndent < parentIndent) { + indentStatus = -1; + } + } else { + allowBlockCollections = false; + } + } + } + + if (allowBlockCollections) { + allowBlockCollections = atNewLine || allowCompact; + } + + if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) { + if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) { + flowIndent = parentIndent; + } else { + flowIndent = parentIndent + 1; + } + + blockIndent = state.position - state.lineStart; + + if (indentStatus === 1) { + if (allowBlockCollections && + (readBlockSequence(state, blockIndent) || + readBlockMapping(state, blockIndent, flowIndent)) || + readFlowCollection(state, flowIndent)) { + hasContent = true; + } else { + if ((allowBlockScalars && readBlockScalar(state, flowIndent)) || + readSingleQuotedScalar(state, flowIndent) || + readDoubleQuotedScalar(state, flowIndent)) { + hasContent = true; + + } else if (readAlias(state)) { + hasContent = true; + + if (state.tag !== null || state.anchor !== null) { + throwError(state, 'alias node should not have any properties'); + } + + } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) { + hasContent
= true; + + if (state.tag === null) { + state.tag = '?'; + } + } + + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + } + } else if (indentStatus === 0) { + // Special case: block sequences are allowed to have same indentation level as the parent. + // http://www.yaml.org/spec/1.2/spec.html#id2799784 + hasContent = allowBlockCollections && readBlockSequence(state, blockIndent); + } + } + + if (state.tag === null) { + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + + } else if (state.tag === '?') { + // Implicit resolving is not allowed for non-scalar types, and '?' + // non-specific tag is only automatically assigned to plain scalars. + // + // We only need to check kind conformity in case user explicitly assigns '?' + // tag, for example like this: "! [0]" + // + if (state.result !== null && state.kind !== 'scalar') { + throwError(state, 'unacceptable node kind for ! tag; it should be "scalar", not "' + state.kind + '"'); + } + + for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) { + type = state.implicitTypes[typeIndex]; + + if (type.resolve(state.result)) { // `state.result` updated in resolver if matched + state.result = type.construct(state.result); + state.tag = type.tag; + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + break; + } + } + } else if (state.tag !== '!') { + if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) { + type = state.typeMap[state.kind || 'fallback'][state.tag]; + } else { + // looking for multi type + type = null; + typeList = state.typeMap.multi[state.kind || 'fallback']; + + for (typeIndex = 0, typeQuantity = typeList.length; typeIndex < typeQuantity; typeIndex += 1) { + if (state.tag.slice(0, typeList[typeIndex].tag.length) === typeList[typeIndex].tag) { + type = typeList[typeIndex]; + break; + } + } + } + + if (!type) { + throwError(state, 'unknown tag !<' + state.tag + '>'); + } + + if (state.result !== null && type.kind !== state.kind) { + throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"'); + } + + if (!type.resolve(state.result, state.tag)) { // `state.result` updated in resolver if matched + throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag'); + } else { + state.result = type.construct(state.result, state.tag); + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + } + } + + if (state.listener !== null) { + state.listener('close', state); + } + return state.tag !== null || state.anchor !== null || hasContent; +} + +function readDocument(state) { + var documentStart = state.position, + _position, + directiveName, + directiveArgs, + hasDirectives = false, + ch; + + state.version = null; + state.checkLineBreaks = state.legacy; + state.tagMap = Object.create(null); + state.anchorMap = Object.create(null); + + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + skipSeparationSpace(state, true, -1); + + ch = state.input.charCodeAt(state.position); + + if (state.lineIndent > 0 || ch !== 0x25/* % */) { + break; + } + + hasDirectives = true; + ch = state.input.charCodeAt(++state.position); + _position = state.position; + + while (ch !== 0 && !is_WS_OR_EOL(ch)) { + ch = state.input.charCodeAt(++state.position); + } + + directiveName = state.input.slice(_position, state.position); + directiveArgs = []; + + if (directiveName.length < 
1) { + throwError(state, 'directive name must not be less than one character in length'); + } + + while (ch !== 0) { + while (is_WHITE_SPACE(ch)) { + ch = state.input.charCodeAt(++state.position); + } + + if (ch === 0x23/* # */) { + do { ch = state.input.charCodeAt(++state.position); } + while (ch !== 0 && !is_EOL(ch)); + break; + } + + if (is_EOL(ch)) break; + + _position = state.position; + + while (ch !== 0 && !is_WS_OR_EOL(ch)) { + ch = state.input.charCodeAt(++state.position); + } + + directiveArgs.push(state.input.slice(_position, state.position)); + } + + if (ch !== 0) readLineBreak(state); + + if (_hasOwnProperty.call(directiveHandlers, directiveName)) { + directiveHandlers[directiveName](state, directiveName, directiveArgs); + } else { + throwWarning(state, 'unknown document directive "' + directiveName + '"'); + } + } + + skipSeparationSpace(state, true, -1); + + if (state.lineIndent === 0 && + state.input.charCodeAt(state.position) === 0x2D/* - */ && + state.input.charCodeAt(state.position + 1) === 0x2D/* - */ && + state.input.charCodeAt(state.position + 2) === 0x2D/* - */) { + state.position += 3; + skipSeparationSpace(state, true, -1); + + } else if (hasDirectives) { + throwError(state, 'directives end mark is expected'); + } + + composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true); + skipSeparationSpace(state, true, -1); + + if (state.checkLineBreaks && + PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) { + throwWarning(state, 'non-ASCII line breaks are interpreted as content'); + } + + state.documents.push(state.result); + + if (state.position === state.lineStart && testDocumentSeparator(state)) { + + if (state.input.charCodeAt(state.position) === 0x2E/* . */) { + state.position += 3; + skipSeparationSpace(state, true, -1); + } + return; + } + + if (state.position < (state.length - 1)) { + throwError(state, 'end of the stream or a document separator is expected'); + } else { + return; + } +} + + +function loadDocuments(input, options) { + input = String(input); + options = options || {}; + + if (input.length !== 0) { + + // Add tailing `\n` if not exists + if (input.charCodeAt(input.length - 1) !== 0x0A/* LF */ && + input.charCodeAt(input.length - 1) !== 0x0D/* CR */) { + input += '\n'; + } + + // Strip BOM + if (input.charCodeAt(0) === 0xFEFF) { + input = input.slice(1); + } + } + + var state = new State(input, options); + + var nullpos = input.indexOf('\0'); + + if (nullpos !== -1) { + state.position = nullpos; + throwError(state, 'null byte is not allowed in input'); + } + + // Use 0 as string terminator. That significantly simplifies bounds check. 
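// Illustrative sketch, not part of the bundled file: readDocument and loadDocuments
// here handle `%YAML`/`%TAG` directives and the `---` / `...` document markers,
// producing one result per document. Assuming the standalone js-yaml package that
// this bundle vendors, a multi-document stream is read with loadAll:
const yaml = require('js-yaml');
console.log(yaml.loadAll('---\na: 1\n---\nb: 2\n...\n')); // [ { a: 1 }, { b: 2 } ]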
+ state.input += '\0'; + + while (state.input.charCodeAt(state.position) === 0x20/* Space */) { + state.lineIndent += 1; + state.position += 1; + } + + while (state.position < (state.length - 1)) { + readDocument(state); + } + + return state.documents; +} + + +function loadAll(input, iterator, options) { + if (iterator !== null && typeof iterator === 'object' && typeof options === 'undefined') { + options = iterator; + iterator = null; + } + + var documents = loadDocuments(input, options); + + if (typeof iterator !== 'function') { + return documents; + } + + for (var index = 0, length = documents.length; index < length; index += 1) { + iterator(documents[index]); + } +} + + +function load(input, options) { + var documents = loadDocuments(input, options); + + if (documents.length === 0) { + /*eslint-disable no-undefined*/ + return undefined; + } else if (documents.length === 1) { + return documents[0]; + } + throw new YAMLException('expected a single document in the stream, but found more'); +} + + +module.exports.loadAll = loadAll; +module.exports.load = load; + + +/***/ }), + +/***/ 2046: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/*eslint-disable max-len*/ + +var YAMLException = __nccwpck_require__(1248); +var Type = __nccwpck_require__(9557); + + +function compileList(schema, name) { + var result = []; + + schema[name].forEach(function (currentType) { + var newIndex = result.length; + + result.forEach(function (previousType, previousIndex) { + if (previousType.tag === currentType.tag && + previousType.kind === currentType.kind && + previousType.multi === currentType.multi) { + + newIndex = previousIndex; + } + }); + + result[newIndex] = currentType; + }); + + return result; +} + + +function compileMap(/* lists... */) { + var result = { + scalar: {}, + sequence: {}, + mapping: {}, + fallback: {}, + multi: { + scalar: [], + sequence: [], + mapping: [], + fallback: [] + } + }, index, length; + + function collectType(type) { + if (type.multi) { + result.multi[type.kind].push(type); + result.multi['fallback'].push(type); + } else { + result[type.kind][type.tag] = result['fallback'][type.tag] = type; + } + } + + for (index = 0, length = arguments.length; index < length; index += 1) { + arguments[index].forEach(collectType); + } + return result; +} + + +function Schema(definition) { + return this.extend(definition); +} + + +Schema.prototype.extend = function extend(definition) { + var implicit = []; + var explicit = []; + + if (definition instanceof Type) { + // Schema.extend(type) + explicit.push(definition); + + } else if (Array.isArray(definition)) { + // Schema.extend([ type1, type2, ... ]) + explicit = explicit.concat(definition); + + } else if (definition && (Array.isArray(definition.implicit) || Array.isArray(definition.explicit))) { + // Schema.extend({ explicit: [ type1, type2, ... ], implicit: [ type1, type2, ... ] }) + if (definition.implicit) implicit = implicit.concat(definition.implicit); + if (definition.explicit) explicit = explicit.concat(definition.explicit); + + } else { + throw new YAMLException('Schema.extend argument should be a Type, [ Type ], ' + + 'or a schema definition ({ implicit: [...], explicit: [...] 
})'); + } + + implicit.forEach(function (type) { + if (!(type instanceof Type)) { + throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); + } + + if (type.loadKind && type.loadKind !== 'scalar') { + throw new YAMLException('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.'); + } + + if (type.multi) { + throw new YAMLException('There is a multi type in the implicit list of a schema. Multi tags can only be listed as explicit.'); + } + }); + + explicit.forEach(function (type) { + if (!(type instanceof Type)) { + throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); + } + }); + + var result = Object.create(Schema.prototype); + + result.implicit = (this.implicit || []).concat(implicit); + result.explicit = (this.explicit || []).concat(explicit); + + result.compiledImplicit = compileList(result, 'implicit'); + result.compiledExplicit = compileList(result, 'explicit'); + result.compiledTypeMap = compileMap(result.compiledImplicit, result.compiledExplicit); + + return result; +}; + + +module.exports = Schema; + + +/***/ }), + +/***/ 5746: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Standard YAML's Core schema. +// http://www.yaml.org/spec/1.2/spec.html#id2804923 +// +// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. +// So, Core schema has no distinctions from JSON schema is JS-YAML. + + + + + +module.exports = __nccwpck_require__(8927); + + +/***/ }), + +/***/ 7336: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// JS-YAML's default schema for `safeLoad` function. +// It is not described in the YAML specification. +// +// This schema is based on standard YAML's Core schema and includes most of +// extra types described at YAML tag repository. (http://yaml.org/type/) + + + + + +module.exports = (__nccwpck_require__(5746).extend)({ + implicit: [ + __nccwpck_require__(8966), + __nccwpck_require__(6854) + ], + explicit: [ + __nccwpck_require__(8149), + __nccwpck_require__(8649), + __nccwpck_require__(6267), + __nccwpck_require__(8758) + ] +}); + + +/***/ }), + +/***/ 9832: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Standard YAML's Failsafe schema. +// http://www.yaml.org/spec/1.2/spec.html#id2802346 + + + + + +var Schema = __nccwpck_require__(2046); + + +module.exports = new Schema({ + explicit: [ + __nccwpck_require__(3929), + __nccwpck_require__(7161), + __nccwpck_require__(7316) + ] +}); + + +/***/ }), + +/***/ 8927: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Standard YAML's JSON schema. +// http://www.yaml.org/spec/1.2/spec.html#id2803231 +// +// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. +// So, this schema is not such strict as defined in the YAML specification. +// It allows numbers in binary notaion, use `Null` and `NULL` as `null`, etc. 
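// Illustrative sketch, not part of the bundled file: the schema modules here wire up
// the standard failsafe, JSON, core, and default schemas. Assuming the standalone
// js-yaml package that this bundle vendors, the chosen schema controls implicit type
// resolution at load time:
const yaml = require('js-yaml');
console.log(yaml.load('when: 2001-12-14'));                               // { when: 2001-12-14T00:00:00.000Z } - default schema resolves timestamps to Date
console.log(yaml.load('when: 2001-12-14', { schema: yaml.JSON_SCHEMA })); // { when: '2001-12-14' } - kept as a string
console.log(yaml.load('n: 42', { schema: yaml.FAILSAFE_SCHEMA }));        // { n: '42' } - failsafe knows only str/seq/map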
+ + + + + +module.exports = (__nccwpck_require__(9832).extend)({ + implicit: [ + __nccwpck_require__(4333), + __nccwpck_require__(7296), + __nccwpck_require__(4652), + __nccwpck_require__(7584) + ] +}); + + +/***/ }), + +/***/ 9440: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + + +var common = __nccwpck_require__(9816); + + +// get snippet for a single line, respecting maxLength +function getLine(buffer, lineStart, lineEnd, position, maxLineLength) { + var head = ''; + var tail = ''; + var maxHalfLength = Math.floor(maxLineLength / 2) - 1; + + if (position - lineStart > maxHalfLength) { + head = ' ... '; + lineStart = position - maxHalfLength + head.length; + } + + if (lineEnd - position > maxHalfLength) { + tail = ' ...'; + lineEnd = position + maxHalfLength - tail.length; + } + + return { + str: head + buffer.slice(lineStart, lineEnd).replace(/\t/g, '→') + tail, + pos: position - lineStart + head.length // relative position + }; +} + + +function padStart(string, max) { + return common.repeat(' ', max - string.length) + string; +} + + +function makeSnippet(mark, options) { + options = Object.create(options || null); + + if (!mark.buffer) return null; + + if (!options.maxLength) options.maxLength = 79; + if (typeof options.indent !== 'number') options.indent = 1; + if (typeof options.linesBefore !== 'number') options.linesBefore = 3; + if (typeof options.linesAfter !== 'number') options.linesAfter = 2; + + var re = /\r?\n|\r|\0/g; + var lineStarts = [ 0 ]; + var lineEnds = []; + var match; + var foundLineNo = -1; + + while ((match = re.exec(mark.buffer))) { + lineEnds.push(match.index); + lineStarts.push(match.index + match[0].length); + + if (mark.position <= match.index && foundLineNo < 0) { + foundLineNo = lineStarts.length - 2; + } + } + + if (foundLineNo < 0) foundLineNo = lineStarts.length - 1; + + var result = '', i, line; + var lineNoLength = Math.min(mark.line + options.linesAfter, lineEnds.length).toString().length; + var maxLineLength = options.maxLength - (options.indent + lineNoLength + 3); + + for (i = 1; i <= options.linesBefore; i++) { + if (foundLineNo - i < 0) break; + line = getLine( + mark.buffer, + lineStarts[foundLineNo - i], + lineEnds[foundLineNo - i], + mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo - i]), + maxLineLength + ); + result = common.repeat(' ', options.indent) + padStart((mark.line - i + 1).toString(), lineNoLength) + + ' | ' + line.str + '\n' + result; + } + + line = getLine(mark.buffer, lineStarts[foundLineNo], lineEnds[foundLineNo], mark.position, maxLineLength); + result += common.repeat(' ', options.indent) + padStart((mark.line + 1).toString(), lineNoLength) + + ' | ' + line.str + '\n'; + result += common.repeat('-', options.indent + lineNoLength + 3 + line.pos) + '^' + '\n'; + + for (i = 1; i <= options.linesAfter; i++) { + if (foundLineNo + i >= lineEnds.length) break; + line = getLine( + mark.buffer, + lineStarts[foundLineNo + i], + lineEnds[foundLineNo + i], + mark.position - (lineStarts[foundLineNo] - lineStarts[foundLineNo + i]), + maxLineLength + ); + result += common.repeat(' ', options.indent) + padStart((mark.line + i + 1).toString(), lineNoLength) + + ' | ' + line.str + '\n'; + } + + return result.replace(/\n$/, ''); +} + + +module.exports = makeSnippet; + + +/***/ }), + +/***/ 9557: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var YAMLException = __nccwpck_require__(1248); + +var TYPE_CONSTRUCTOR_OPTIONS = [ + 'kind', + 'multi', 
+ 'resolve', + 'construct', + 'instanceOf', + 'predicate', + 'represent', + 'representName', + 'defaultStyle', + 'styleAliases' +]; + +var YAML_NODE_KINDS = [ + 'scalar', + 'sequence', + 'mapping' +]; + +function compileStyleAliases(map) { + var result = {}; + + if (map !== null) { + Object.keys(map).forEach(function (style) { + map[style].forEach(function (alias) { + result[String(alias)] = style; + }); + }); + } + + return result; +} + +function Type(tag, options) { + options = options || {}; + + Object.keys(options).forEach(function (name) { + if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) { + throw new YAMLException('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.'); + } + }); + + // TODO: Add tag format check. + this.options = options; // keep original options in case user wants to extend this type later + this.tag = tag; + this.kind = options['kind'] || null; + this.resolve = options['resolve'] || function () { return true; }; + this.construct = options['construct'] || function (data) { return data; }; + this.instanceOf = options['instanceOf'] || null; + this.predicate = options['predicate'] || null; + this.represent = options['represent'] || null; + this.representName = options['representName'] || null; + this.defaultStyle = options['defaultStyle'] || null; + this.multi = options['multi'] || false; + this.styleAliases = compileStyleAliases(options['styleAliases'] || null); + + if (YAML_NODE_KINDS.indexOf(this.kind) === -1) { + throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.'); + } +} + +module.exports = Type; + + +/***/ }), + +/***/ 8149: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/*eslint-disable no-bitwise*/ + + +var Type = __nccwpck_require__(9557); + + +// [ 64, 65, 66 ] -> [ padding, CR, LF ] +var BASE64_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r'; + + +function resolveYamlBinary(data) { + if (data === null) return false; + + var code, idx, bitlen = 0, max = data.length, map = BASE64_MAP; + + // Convert one by one. + for (idx = 0; idx < max; idx++) { + code = map.indexOf(data.charAt(idx)); + + // Skip CR/LF + if (code > 64) continue; + + // Fail on illegal characters + if (code < 0) return false; + + bitlen += 6; + } + + // If there are any bits left, source was corrupted + return (bitlen % 8) === 0; +} + +function constructYamlBinary(data) { + var idx, tailbits, + input = data.replace(/[\r\n=]/g, ''), // remove CR/LF & padding to simplify scan + max = input.length, + map = BASE64_MAP, + bits = 0, + result = []; + + // Collect by 6*4 bits (3 bytes) + + for (idx = 0; idx < max; idx++) { + if ((idx % 4 === 0) && idx) { + result.push((bits >> 16) & 0xFF); + result.push((bits >> 8) & 0xFF); + result.push(bits & 0xFF); + } + + bits = (bits << 6) | map.indexOf(input.charAt(idx)); + } + + // Dump tail + + tailbits = (max % 4) * 6; + + if (tailbits === 0) { + result.push((bits >> 16) & 0xFF); + result.push((bits >> 8) & 0xFF); + result.push(bits & 0xFF); + } else if (tailbits === 18) { + result.push((bits >> 10) & 0xFF); + result.push((bits >> 2) & 0xFF); + } else if (tailbits === 12) { + result.push((bits >> 4) & 0xFF); + } + + return new Uint8Array(result); +} + +function representYamlBinary(object /*, style*/) { + var result = '', bits = 0, idx, tail, + max = object.length, + map = BASE64_MAP; + + // Convert every three bytes to 4 ASCII characters. 
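// Illustrative sketch, not part of the bundled file: the Type constructor above accepts
// the listed options (kind, resolve, construct, represent, ...). Assuming the standalone
// js-yaml package that this bundle vendors, a custom tag can be registered by extending a
// schema; the `!point` tag and the PointType/POINT_SCHEMA names below are made up for the example:
const yaml = require('js-yaml');
const PointType = new yaml.Type('!point', {
  kind: 'sequence',
  resolve: (data) => Array.isArray(data) && data.length === 2,
  construct: (data) => ({ x: data[0], y: data[1] })
});
const POINT_SCHEMA = yaml.DEFAULT_SCHEMA.extend([PointType]);
console.log(yaml.load('center: !point [1, 2]', { schema: POINT_SCHEMA })); // { center: { x: 1, y: 2 } }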
+ + for (idx = 0; idx < max; idx++) { + if ((idx % 3 === 0) && idx) { + result += map[(bits >> 18) & 0x3F]; + result += map[(bits >> 12) & 0x3F]; + result += map[(bits >> 6) & 0x3F]; + result += map[bits & 0x3F]; + } + + bits = (bits << 8) + object[idx]; + } + + // Dump tail + + tail = max % 3; + + if (tail === 0) { + result += map[(bits >> 18) & 0x3F]; + result += map[(bits >> 12) & 0x3F]; + result += map[(bits >> 6) & 0x3F]; + result += map[bits & 0x3F]; + } else if (tail === 2) { + result += map[(bits >> 10) & 0x3F]; + result += map[(bits >> 4) & 0x3F]; + result += map[(bits << 2) & 0x3F]; + result += map[64]; + } else if (tail === 1) { + result += map[(bits >> 2) & 0x3F]; + result += map[(bits << 4) & 0x3F]; + result += map[64]; + result += map[64]; + } + + return result; +} + +function isBinary(obj) { + return Object.prototype.toString.call(obj) === '[object Uint8Array]'; +} + +module.exports = new Type('tag:yaml.org,2002:binary', { + kind: 'scalar', + resolve: resolveYamlBinary, + construct: constructYamlBinary, + predicate: isBinary, + represent: representYamlBinary +}); + + +/***/ }), + +/***/ 7296: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(9557); + +function resolveYamlBoolean(data) { + if (data === null) return false; + + var max = data.length; + + return (max === 4 && (data === 'true' || data === 'True' || data === 'TRUE')) || + (max === 5 && (data === 'false' || data === 'False' || data === 'FALSE')); +} + +function constructYamlBoolean(data) { + return data === 'true' || + data === 'True' || + data === 'TRUE'; +} + +function isBoolean(object) { + return Object.prototype.toString.call(object) === '[object Boolean]'; +} + +module.exports = new Type('tag:yaml.org,2002:bool', { + kind: 'scalar', + resolve: resolveYamlBoolean, + construct: constructYamlBoolean, + predicate: isBoolean, + represent: { + lowercase: function (object) { return object ? 'true' : 'false'; }, + uppercase: function (object) { return object ? 'TRUE' : 'FALSE'; }, + camelcase: function (object) { return object ? 'True' : 'False'; } + }, + defaultStyle: 'lowercase' +}); + + +/***/ }), + +/***/ 7584: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var common = __nccwpck_require__(9816); +var Type = __nccwpck_require__(9557); + +var YAML_FLOAT_PATTERN = new RegExp( + // 2.5e4, 2.5 and integers + '^(?:[-+]?(?:[0-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' + + // .2e4, .2 + // special case, seems not from spec + '|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' + + // .inf + '|[-+]?\\.(?:inf|Inf|INF)' + + // .nan + '|\\.(?:nan|NaN|NAN))$'); + +function resolveYamlFloat(data) { + if (data === null) return false; + + if (!YAML_FLOAT_PATTERN.test(data) || + // Quick hack to not allow integers end with `_` + // Probably should update regexp & check speed + data[data.length - 1] === '_') { + return false; + } + + return true; +} + +function constructYamlFloat(data) { + var value, sign; + + value = data.replace(/_/g, '').toLowerCase(); + sign = value[0] === '-' ? -1 : 1; + + if ('+-'.indexOf(value[0]) >= 0) { + value = value.slice(1); + } + + if (value === '.inf') { + return (sign === 1) ? 
Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY; + + } else if (value === '.nan') { + return NaN; + } + return sign * parseFloat(value, 10); +} + + +var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/; + +function representYamlFloat(object, style) { + var res; + + if (isNaN(object)) { + switch (style) { + case 'lowercase': return '.nan'; + case 'uppercase': return '.NAN'; + case 'camelcase': return '.NaN'; + } + } else if (Number.POSITIVE_INFINITY === object) { + switch (style) { + case 'lowercase': return '.inf'; + case 'uppercase': return '.INF'; + case 'camelcase': return '.Inf'; + } + } else if (Number.NEGATIVE_INFINITY === object) { + switch (style) { + case 'lowercase': return '-.inf'; + case 'uppercase': return '-.INF'; + case 'camelcase': return '-.Inf'; + } + } else if (common.isNegativeZero(object)) { + return '-0.0'; + } + + res = object.toString(10); + + // JS stringifier can build scientific format without dots: 5e-100, + // while YAML requres dot: 5.e-100. Fix it with simple hack + + return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace('e', '.e') : res; +} + +function isFloat(object) { + return (Object.prototype.toString.call(object) === '[object Number]') && + (object % 1 !== 0 || common.isNegativeZero(object)); +} + +module.exports = new Type('tag:yaml.org,2002:float', { + kind: 'scalar', + resolve: resolveYamlFloat, + construct: constructYamlFloat, + predicate: isFloat, + represent: representYamlFloat, + defaultStyle: 'lowercase' +}); + + +/***/ }), + +/***/ 4652: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var common = __nccwpck_require__(9816); +var Type = __nccwpck_require__(9557); + +function isHexCode(c) { + return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) || + ((0x41/* A */ <= c) && (c <= 0x46/* F */)) || + ((0x61/* a */ <= c) && (c <= 0x66/* f */)); +} + +function isOctCode(c) { + return ((0x30/* 0 */ <= c) && (c <= 0x37/* 7 */)); +} + +function isDecCode(c) { + return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)); +} + +function resolveYamlInteger(data) { + if (data === null) return false; + + var max = data.length, + index = 0, + hasDigits = false, + ch; + + if (!max) return false; + + ch = data[index]; + + // sign + if (ch === '-' || ch === '+') { + ch = data[++index]; + } + + if (ch === '0') { + // 0 + if (index + 1 === max) return true; + ch = data[++index]; + + // base 2, base 8, base 16 + + if (ch === 'b') { + // base 2 + index++; + + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (ch !== '0' && ch !== '1') return false; + hasDigits = true; + } + return hasDigits && ch !== '_'; + } + + + if (ch === 'x') { + // base 16 + index++; + + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (!isHexCode(data.charCodeAt(index))) return false; + hasDigits = true; + } + return hasDigits && ch !== '_'; + } + + + if (ch === 'o') { + // base 8 + index++; + + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (!isOctCode(data.charCodeAt(index))) return false; + hasDigits = true; + } + return hasDigits && ch !== '_'; + } + } + + // base 10 (except 0) + + // value should not start with `_`; + if (ch === '_') return false; + + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (!isDecCode(data.charCodeAt(index))) { + return false; + } + hasDigits = true; + } + + // Should have digits and should not end with `_` + if (!hasDigits || ch === '_') return false; + + return true; +} + +function 
constructYamlInteger(data) { + var value = data, sign = 1, ch; + + if (value.indexOf('_') !== -1) { + value = value.replace(/_/g, ''); + } + + ch = value[0]; + + if (ch === '-' || ch === '+') { + if (ch === '-') sign = -1; + value = value.slice(1); + ch = value[0]; + } + + if (value === '0') return 0; + + if (ch === '0') { + if (value[1] === 'b') return sign * parseInt(value.slice(2), 2); + if (value[1] === 'x') return sign * parseInt(value.slice(2), 16); + if (value[1] === 'o') return sign * parseInt(value.slice(2), 8); + } + + return sign * parseInt(value, 10); +} + +function isInteger(object) { + return (Object.prototype.toString.call(object)) === '[object Number]' && + (object % 1 === 0 && !common.isNegativeZero(object)); +} + +module.exports = new Type('tag:yaml.org,2002:int', { + kind: 'scalar', + resolve: resolveYamlInteger, + construct: constructYamlInteger, + predicate: isInteger, + represent: { + binary: function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); }, + octal: function (obj) { return obj >= 0 ? '0o' + obj.toString(8) : '-0o' + obj.toString(8).slice(1); }, + decimal: function (obj) { return obj.toString(10); }, + /* eslint-disable max-len */ + hexadecimal: function (obj) { return obj >= 0 ? '0x' + obj.toString(16).toUpperCase() : '-0x' + obj.toString(16).toUpperCase().slice(1); } + }, + defaultStyle: 'decimal', + styleAliases: { + binary: [ 2, 'bin' ], + octal: [ 8, 'oct' ], + decimal: [ 10, 'dec' ], + hexadecimal: [ 16, 'hex' ] + } +}); + + +/***/ }), + +/***/ 7316: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(9557); + +module.exports = new Type('tag:yaml.org,2002:map', { + kind: 'mapping', + construct: function (data) { return data !== null ? 
data : {}; } +}); + + +/***/ }), + +/***/ 6854: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(9557); + +function resolveYamlMerge(data) { + return data === '<<' || data === null; +} + +module.exports = new Type('tag:yaml.org,2002:merge', { + kind: 'scalar', + resolve: resolveYamlMerge +}); + + +/***/ }), + +/***/ 4333: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(9557); + +function resolveYamlNull(data) { + if (data === null) return true; + + var max = data.length; + + return (max === 1 && data === '~') || + (max === 4 && (data === 'null' || data === 'Null' || data === 'NULL')); +} + +function constructYamlNull() { + return null; +} + +function isNull(object) { + return object === null; +} + +module.exports = new Type('tag:yaml.org,2002:null', { + kind: 'scalar', + resolve: resolveYamlNull, + construct: constructYamlNull, + predicate: isNull, + represent: { + canonical: function () { return '~'; }, + lowercase: function () { return 'null'; }, + uppercase: function () { return 'NULL'; }, + camelcase: function () { return 'Null'; }, + empty: function () { return ''; } + }, + defaultStyle: 'lowercase' +}); + + +/***/ }), + +/***/ 8649: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(9557); + +var _hasOwnProperty = Object.prototype.hasOwnProperty; +var _toString = Object.prototype.toString; + +function resolveYamlOmap(data) { + if (data === null) return true; + + var objectKeys = [], index, length, pair, pairKey, pairHasKey, + object = data; + + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + pairHasKey = false; + + if (_toString.call(pair) !== '[object Object]') return false; + + for (pairKey in pair) { + if (_hasOwnProperty.call(pair, pairKey)) { + if (!pairHasKey) pairHasKey = true; + else return false; + } + } + + if (!pairHasKey) return false; + + if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey); + else return false; + } + + return true; +} + +function constructYamlOmap(data) { + return data !== null ? 
data : []; +} + +module.exports = new Type('tag:yaml.org,2002:omap', { + kind: 'sequence', + resolve: resolveYamlOmap, + construct: constructYamlOmap +}); + + +/***/ }), + +/***/ 6267: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(9557); + +var _toString = Object.prototype.toString; + +function resolveYamlPairs(data) { + if (data === null) return true; + + var index, length, pair, keys, result, + object = data; + + result = new Array(object.length); + + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + + if (_toString.call(pair) !== '[object Object]') return false; + + keys = Object.keys(pair); + + if (keys.length !== 1) return false; + + result[index] = [ keys[0], pair[keys[0]] ]; + } + + return true; +} + +function constructYamlPairs(data) { + if (data === null) return []; + + var index, length, pair, keys, result, + object = data; + + result = new Array(object.length); + + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + + keys = Object.keys(pair); + + result[index] = [ keys[0], pair[keys[0]] ]; + } + + return result; +} + +module.exports = new Type('tag:yaml.org,2002:pairs', { + kind: 'sequence', + resolve: resolveYamlPairs, + construct: constructYamlPairs +}); + + +/***/ }), + +/***/ 7161: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(9557); + +module.exports = new Type('tag:yaml.org,2002:seq', { + kind: 'sequence', + construct: function (data) { return data !== null ? data : []; } +}); + + +/***/ }), + +/***/ 8758: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(9557); + +var _hasOwnProperty = Object.prototype.hasOwnProperty; + +function resolveYamlSet(data) { + if (data === null) return true; + + var key, object = data; + + for (key in object) { + if (_hasOwnProperty.call(object, key)) { + if (object[key] !== null) return false; + } + } + + return true; +} + +function constructYamlSet(data) { + return data !== null ? data : {}; +} + +module.exports = new Type('tag:yaml.org,2002:set', { + kind: 'mapping', + resolve: resolveYamlSet, + construct: constructYamlSet +}); + + +/***/ }), + +/***/ 3929: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(9557); + +module.exports = new Type('tag:yaml.org,2002:str', { + kind: 'scalar', + construct: function (data) { return data !== null ? data : ''; } +}); + + +/***/ }), + +/***/ 8966: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var Type = __nccwpck_require__(9557); + +var YAML_DATE_REGEXP = new RegExp( + '^([0-9][0-9][0-9][0-9])' + // [1] year + '-([0-9][0-9])' + // [2] month + '-([0-9][0-9])$'); // [3] day + +var YAML_TIMESTAMP_REGEXP = new RegExp( + '^([0-9][0-9][0-9][0-9])' + // [1] year + '-([0-9][0-9]?)' + // [2] month + '-([0-9][0-9]?)' + // [3] day + '(?:[Tt]|[ \\t]+)' + // ... + '([0-9][0-9]?)' + // [4] hour + ':([0-9][0-9])' + // [5] minute + ':([0-9][0-9])' + // [6] second + '(?:\\.([0-9]*))?' 
+ // [7] fraction + '(?:[ \\t]*(Z|([-+])([0-9][0-9]?)' + // [8] tz [9] tz_sign [10] tz_hour + '(?::([0-9][0-9]))?))?$'); // [11] tz_minute + +function resolveYamlTimestamp(data) { + if (data === null) return false; + if (YAML_DATE_REGEXP.exec(data) !== null) return true; + if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true; + return false; +} + +function constructYamlTimestamp(data) { + var match, year, month, day, hour, minute, second, fraction = 0, + delta = null, tz_hour, tz_minute, date; + + match = YAML_DATE_REGEXP.exec(data); + if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data); + + if (match === null) throw new Error('Date resolve error'); + + // match: [1] year [2] month [3] day + + year = +(match[1]); + month = +(match[2]) - 1; // JS month starts with 0 + day = +(match[3]); + + if (!match[4]) { // no hour + return new Date(Date.UTC(year, month, day)); + } + + // match: [4] hour [5] minute [6] second [7] fraction + + hour = +(match[4]); + minute = +(match[5]); + second = +(match[6]); + + if (match[7]) { + fraction = match[7].slice(0, 3); + while (fraction.length < 3) { // milli-seconds + fraction += '0'; + } + fraction = +fraction; + } + + // match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute + + if (match[9]) { + tz_hour = +(match[10]); + tz_minute = +(match[11] || 0); + delta = (tz_hour * 60 + tz_minute) * 60000; // delta in mili-seconds + if (match[9] === '-') delta = -delta; + } + + date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction)); + + if (delta) date.setTime(date.getTime() - delta); + + return date; +} + +function representYamlTimestamp(object /*, style*/) { + return object.toISOString(); +} + +module.exports = new Type('tag:yaml.org,2002:timestamp', { + kind: 'scalar', + resolve: resolveYamlTimestamp, + construct: constructYamlTimestamp, + instanceOf: Date, + represent: representYamlTimestamp +}); + + +/***/ }), + +/***/ 207: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const color = __nccwpck_require__(4095) + +const { extendedTypeOf } = __nccwpck_require__(7616) + +const Theme = { + ' ' (s) { return s }, + '+': color.green, + '-': color.red +} + +const subcolorizeToCallback = function (options, key, diff, output, color, indent) { + let subvalue + const prefix = key ? `${key}: ` : '' + const subindent = indent + ' ' + + const outputElisions = (n) => { + const maxElisions = options.maxElisions === undefined ? Infinity : options.maxElisions + if (n < maxElisions) { + for (let i = 0; i < n; i++) { + output(' ', subindent + '...') + } + } else { + output(' ', subindent + `... 
(${n} entries)`) + } + } + + switch (extendedTypeOf(diff)) { + case 'object': + if (('__old' in diff) && ('__new' in diff) && (Object.keys(diff).length === 2)) { + subcolorizeToCallback(options, key, diff.__old, output, '-', indent) + return subcolorizeToCallback(options, key, diff.__new, output, '+', indent) + } else { + output(color, `${indent}${prefix}{`) + for (const subkey of Object.keys(diff)) { + let m + subvalue = diff[subkey] + if ((m = subkey.match(/^(.*)__deleted$/))) { + subcolorizeToCallback(options, m[1], subvalue, output, '-', subindent) + } else if ((m = subkey.match(/^(.*)__added$/))) { + subcolorizeToCallback(options, m[1], subvalue, output, '+', subindent) + } else { + subcolorizeToCallback(options, subkey, subvalue, output, color, subindent) + } + } + return output(color, `${indent}}`) + } + + case 'array': { + output(color, `${indent}${prefix}[`) + + let looksLikeDiff = true + for (const item of diff) { + if ((extendedTypeOf(item) !== 'array') || !((item.length === 2) || ((item.length === 1) && (item[0] === ' '))) || !(typeof (item[0]) === 'string') || (item[0].length !== 1) || !([' ', '-', '+', '~'].includes(item[0]))) { + looksLikeDiff = false + } + } + + if (looksLikeDiff) { + let op + let elisionCount = 0 + for ([op, subvalue] of diff) { + if (op === ' ' && subvalue == null) { + elisionCount++ + } else { + if (elisionCount > 0) { + outputElisions(elisionCount) + } + elisionCount = 0 + + if (![' ', '~', '+', '-'].includes(op)) { + throw new Error(`Unexpected op '${op}' in ${JSON.stringify(diff, null, 2)}`) + } + if (op === '~') { op = ' ' } + subcolorizeToCallback(options, '', subvalue, output, op, subindent) + } + } + if (elisionCount > 0) { + outputElisions(elisionCount) + } + } else { + for (subvalue of diff) { + subcolorizeToCallback(options, '', subvalue, output, color, subindent) + } + } + + return output(color, `${indent}]`) + } + + default: + if (diff === 0 || diff === null || diff === false || diff === '' || diff) { + return output(color, indent + prefix + JSON.stringify(diff)) + } + } +} + +const colorizeToCallback = (diff, options, output) => + subcolorizeToCallback(options, '', diff, output, ' ', '') + +const colorizeToArray = function (diff, options = {}) { + const output = [] + colorizeToCallback(diff, options, (color, line) => output.push(`${color}${line}`)) + return output +} + +const colorize = function (diff, options = {}) { + const output = [] + colorizeToCallback(diff, options, function (color, line) { + if (options.color != null ? options.color : true) { + return output.push(((options.theme != null ? options.theme[color] : undefined) != null ? (options.theme != null ? 
options.theme[color] : undefined) : Theme[color])(`${color}${line}`) + '\n') + } else { + return output.push(`${color}${line}\n`) + } + }) + return output.join('') +} + +module.exports = { colorize, colorizeToArray, colorizeToCallback } + + +/***/ }), + +/***/ 2078: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { SequenceMatcher } = __nccwpck_require__(9228) +const { extendedTypeOf, roundObj } = __nccwpck_require__(7616) +const { colorize, colorizeToCallback } = __nccwpck_require__(207) + +class JsonDiff { + constructor (options) { + options.outputKeys = options.outputKeys || [] + options.excludeKeys = options.excludeKeys || [] + this.options = options + } + + isScalar (obj) { + return typeof obj !== 'object' || obj === null + } + + objectDiff (obj1, obj2) { + let result = {} + let score = 0 + let equal = true + + for (const [key, value] of Object.entries(obj1)) { + if (!this.options.outputNewOnly) { + const postfix = '__deleted' + + if (!(key in obj2) && !(this.options.excludeKeys.includes(key))) { + result[`${key}${postfix}`] = value + score -= 30 + equal = false + } + } + } + + for (const [key, value] of Object.entries(obj2)) { + const postfix = !this.options.outputNewOnly ? '__added' : '' + + if (!(key in obj1) && !(this.options.excludeKeys.includes(key))) { + result[`${key}${postfix}`] = value + score -= 30 + equal = false + } + } + + for (const [key, value1] of Object.entries(obj1)) { + if (key in obj2) { + if (this.options.excludeKeys.includes(key)) { + continue + } + score += 20 + const value2 = obj2[key] + const change = this.diff(value1, value2) + if (!change.equal) { + result[key] = change.result + equal = false + } else if (this.options.full || this.options.outputKeys.includes(key)) { + result[key] = value1 + } + // console.log(`key ${key} change.score=${change.score} ${change.result}`) + score += Math.min(20, Math.max(-10, change.score / 5)) // BATMAN! 
+ } + } + + if (equal) { + score = 100 * Math.max(Object.keys(obj1).length, 0.5) + if (!this.options.full) { + result = undefined + } + } else { + score = Math.max(0, score) + } + + // console.log(`objectDiff(${JSON.stringify(obj1, null, 2)} <=> ${JSON.stringify(obj2, null, 2)}) == ${JSON.stringify({score, result, equal})}`) + return { score, result, equal } + } + + findMatchingObject (item, index, fuzzyOriginals) { + // console.log('findMatchingObject: ' + JSON.stringify({item, fuzzyOriginals}, null, 2)) + let bestMatch = null + + for (const [key, { item: candidate, index: matchIndex }] of Object.entries(fuzzyOriginals)) { + if (key !== '__next') { + const indexDistance = Math.abs(matchIndex - index) + if (extendedTypeOf(item) === extendedTypeOf(candidate)) { + const { score } = this.diff(item, candidate) + if ( + !bestMatch || + score > bestMatch.score || + (score === bestMatch.score && + indexDistance < bestMatch.indexDistance) + ) { + bestMatch = { score, key, indexDistance } + } + } + } + } + + // console.log('findMatchingObject result = ' + JSON.stringify(bestMatch, null, 2)); + return bestMatch + } + + scalarize (array, originals, fuzzyOriginals) { + // console.log('scalarize', array, originals, fuzzyOriginals); + const fuzzyMatches = [] + if (fuzzyOriginals) { + // Find best fuzzy match for each object in the array + const keyScores = {} + for (let index = 0; index < array.length; index++) { + const item = array[index] + if (this.isScalar(item)) { + continue + } + const bestMatch = this.findMatchingObject(item, index, fuzzyOriginals) + if (bestMatch && (!keyScores[bestMatch.key] || bestMatch.score > keyScores[bestMatch.key].score)) { + keyScores[bestMatch.key] = { score: bestMatch.score, index } + } + } + for (const [key, match] of Object.entries(keyScores)) { + fuzzyMatches[match.index] = key + } + } + + const result = [] + for (let index = 0; index < array.length; index++) { + const item = array[index] + if (this.isScalar(item)) { + result.push(item) + } else { + const key = fuzzyMatches[index] || '__$!SCALAR' + originals.__next++ + originals[key] = { item, index } + result.push(key) + } + } + // console.log('Scalarize result', result); + return result + } + + isScalarized (item, originals) { + return typeof item === 'string' && item in originals + } + + descalarize (item, originals) { + if (this.isScalarized(item, originals)) { + return originals[item].item + } else { + return item + } + } + + arrayDiff (obj1, obj2) { + const originals1 = { __next: 1 } + const seq1 = this.scalarize(obj1, originals1) + const originals2 = { __next: originals1.__next } + const seq2 = this.scalarize(obj2, originals2, originals1) + + if (this.options.sort) { + seq1.sort() + seq2.sort() + } + const opcodes = new SequenceMatcher(null, seq1, seq2).getOpcodes() + + // console.log(`arrayDiff:\nobj1 = ${JSON.stringify(obj1, null, 2)}\nobj2 = ${JSON.stringify(obj2, null, 2)}\nseq1 = ${JSON.stringify(seq1, null, 2)}\nseq2 = ${JSON.stringify(seq2, null, 2)}\nopcodes = ${JSON.stringify(opcodes, null, 2)}`) + + let result = [] + let score = 0 + let equal = true + + for (const [op, i1, i2, j1, j2] of opcodes) { + let i, j + let asc, end + let asc1, end1 + let asc2, end2 + if (!(op === 'equal' || (this.options.keysOnly && op === 'replace'))) { + equal = false + } + + switch (op) { + case 'equal': + for ( + i = i1, end = i2, asc = i1 <= end; + asc ? i < end : i > end; + asc ? 
i++ : i-- + ) { + const item = seq1[i] + if (this.isScalarized(item, originals1)) { + if (!this.isScalarized(item, originals2)) { + throw new Error( + `internal bug: isScalarized(item, originals1) != isScalarized(item, originals2) for item ${JSON.stringify( + item + )}` + ) + } + const item1 = this.descalarize(item, originals1) + const item2 = this.descalarize(item, originals2) + const change = this.diff(item1, item2) + if (!change.equal) { + result.push(['~', change.result]) + equal = false + } else { + if (this.options.full || this.options.keepUnchangedValues) { + result.push([' ', item1]) + } else { + result.push([' ']) + } + } + } else { + if (this.options.full || this.options.keepUnchangedValues) { + result.push([' ', item]) + } else { + result.push([' ']) + } + } + score += 10 + } + break + case 'delete': + for ( + i = i1, end1 = i2, asc1 = i1 <= end1; + asc1 ? i < end1 : i > end1; + asc1 ? i++ : i-- + ) { + result.push(['-', this.descalarize(seq1[i], originals1)]) + score -= 5 + } + break + case 'insert': + for ( + j = j1, end2 = j2, asc2 = j1 <= end2; + asc2 ? j < end2 : j > end2; + asc2 ? j++ : j-- + ) { + result.push(['+', this.descalarize(seq2[j], originals2)]) + score -= 5 + } + break + case 'replace': + if (!this.options.keysOnly) { + let asc3, end3 + let asc4, end4 + for ( + i = i1, end3 = i2, asc3 = i1 <= end3; + asc3 ? i < end3 : i > end3; + asc3 ? i++ : i-- + ) { + result.push(['-', this.descalarize(seq1[i], originals1)]) + score -= 5 + } + for ( + j = j1, end4 = j2, asc4 = j1 <= end4; + asc4 ? j < end4 : j > end4; + asc4 ? j++ : j-- + ) { + result.push(['+', this.descalarize(seq2[j], originals2)]) + score -= 5 + } + } else { + let asc5, end5 + for ( + i = i1, end5 = i2, asc5 = i1 <= end5; + asc5 ? i < end5 : i > end5; + asc5 ? i++ : i-- + ) { + const change = this.diff( + this.descalarize(seq1[i], originals1), + this.descalarize(seq2[i - i1 + j1], originals2) + ) + if (!change.equal) { + result.push(['~', change.result]) + equal = false + } else { + result.push([' ']) + } + } + } + break + } + } + + if (equal || opcodes.length === 0) { + if (!this.options.full) { + result = undefined + } else { + result = obj1 + } + score = 100 + } else { + score = Math.max(0, score) + } + + return { score, result, equal } + } + + diff (obj1, obj2) { + const type1 = extendedTypeOf(obj1) + const type2 = extendedTypeOf(obj2) + + if (type1 === type2) { + switch (type1) { + case 'object': + return this.objectDiff(obj1, obj2) + + case 'array': + return this.arrayDiff(obj1, obj2) + } + } + + // Compare primitives or complex objects of different types + let score = 100 + let result = obj1 + let equal + if (!this.options.keysOnly) { + if (type1 === 'date' && type2 === 'date') { + equal = obj1.getTime() === obj2.getTime() + } else { + equal = obj1 === obj2 + } + if (!equal) { + score = 0 + + if (this.options.outputNewOnly) { + result = obj2 + } else { + result = { __old: obj1, __new: obj2 } + } + } else if (!this.options.full) { + result = undefined + } + } else { + equal = true + result = undefined + } + + // console.log(`diff: equal ${equal} obj1 ${obj1} obj2 ${obj2} score ${score} ${result || ''}`) + + return { score, result, equal } + } +} + +function diff (obj1, obj2, options = {}) { + if (options.precision !== undefined) { + obj1 = roundObj(obj1, options.precision) + obj2 = roundObj(obj2, options.precision) + } + return new JsonDiff(options).diff(obj1, obj2).result +} + +function diffString (obj1, obj2, options = {}) { + return colorize(diff(obj1, obj2, options), options) +} + 
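+// A minimal usage sketch of the diff()/diffString() helpers defined above,
+// kept entirely in comments so it cannot affect the bundled output. The
+// option names this module reads include full, keysOnly, outputNewOnly,
+// sort and precision; only full is used here, and the sample objects and
+// the shown result are illustrative only.
+//
+//   const before = { name: 'web', replicas: 2, tags: ['a', 'b'] }
+//   const after  = { name: 'web', replicas: 3, tags: ['a', 'c'] }
+//
+//   // Structured result: changed values become { __old, __new } pairs and
+//   // array entries are reported as [' ' | '-' | '+' | '~', value] tuples.
+//   const changes = diff(before, after)
+//   // => { replicas: { __old: 2, __new: 3 }, tags: [[' '], ['-', 'b'], ['+', 'c']] }
+//
+//   // Line-oriented, optionally colorized rendering of the same comparison.
+//   console.log(diffString(before, after, { full: true }))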
+module.exports = { diff, diffString, colorize, colorizeToCallback } + + +/***/ }), + +/***/ 7616: +/***/ ((module) => { + +const extendedTypeOf = function (obj) { + const result = typeof obj + if (obj == null) { + return 'null' + } else if (result === 'object' && obj.constructor === Array) { + return 'array' + } else if (result === 'object' && obj instanceof Date) { + return 'date' + } else { + return result + } +} + +const roundObj = function (data, precision) { + const type = typeof data + if (type === 'array') { + return data.map((x) => roundObj(x, precision)) + } else if (type === 'object') { + for (const key in data) { + data[key] = roundObj(data[key], precision) + } + return data + } else if ( + type === 'number' && + Number.isFinite(data) && + !Number.isInteger(data) + ) { + return +data.toFixed(precision) + } else { + return data + } +} + +module.exports = { extendedTypeOf, roundObj } + + +/***/ }), + +/***/ 5560: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(8264) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 6496: +/***/ ((module) => { + +const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; +const numRegex = /^([\-\+])?(0*)(\.[0-9]+([eE]\-?[0-9]+)?|[0-9]+(\.[0-9]+([eE]\-?[0-9]+)?)?)$/; +// const octRegex = /0x[a-z0-9]+/; +// const binRegex = /0x[a-z0-9]+/; + + +//polyfill +if (!Number.parseInt && window.parseInt) { + Number.parseInt = window.parseInt; +} +if (!Number.parseFloat && window.parseFloat) { + Number.parseFloat = window.parseFloat; +} + + +const consider = { + hex : true, + leadingZeros: true, + decimalPoint: "\.", + eNotation: true + //skipLike: /regex/ +}; + +function toNumber(str, options = {}){ + // const options = Object.assign({}, consider); + // if(opt.leadingZeros === false){ + // options.leadingZeros = false; + // }else if(opt.hex === false){ + // options.hex = false; + // } + + options = Object.assign({}, consider, options ); + if(!str || typeof str !== "string" ) return str; + + let trimmedStr = str.trim(); + // if(trimmedStr === "0.0") return 0; + // else if(trimmedStr === "+0.0") return 0; + // else if(trimmedStr === "-0.0") return -0; + + if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; + else if (options.hex && hexRegex.test(trimmedStr)) { + return Number.parseInt(trimmedStr, 16); + // } else if (options.parseOct && octRegex.test(str)) { + // return Number.parseInt(val, 8); + // }else if (options.parseBin && binRegex.test(str)) { + // return Number.parseInt(val, 2); + }else{ + //separate negative sign, leading zeros, and rest number + const match = numRegex.exec(trimmedStr); + if(match){ + const sign = match[1]; + const 
leadingZeros = match[2]; + let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros + //trim ending zeros for floating number + + const eNotation = match[4] || match[6]; + if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 + else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 + else{//no leading zeros or leading zeros are allowed + const num = Number(trimmedStr); + const numStr = "" + num; + if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation + if(options.eNotation) return num; + else return str; + }else if(eNotation){ //given number has enotation + if(options.eNotation) return num; + else return str; + }else if(trimmedStr.indexOf(".") !== -1){ //floating number + // const decimalPart = match[5].substr(1); + // const intPart = trimmedStr.substr(0,trimmedStr.indexOf(".")); + + + // const p = numStr.indexOf("."); + // const givenIntPart = numStr.substr(0,p); + // const givenDecPart = numStr.substr(p+1); + if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 + else if(numStr === numTrimmedByZeros) return num; //0.456. 0.79000 + else if( sign && numStr === "-"+numTrimmedByZeros) return num; + else return str; + } + + if(leadingZeros){ + // if(numTrimmedByZeros === numStr){ + // if(options.leadingZeros) return num; + // else return str; + // }else return str; + if(numTrimmedByZeros === numStr) return num; + else if(sign+numTrimmedByZeros === numStr) return num; + else return str; + } + + if(trimmedStr === numStr) return num; + else if(trimmedStr === sign+numStr) return num; + // else{ + // //number with +/- sign + // trimmedStr.test(/[-+][0-9]); + + // } + return str; + } + // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str; + + }else{ //non-numeric string + return str; + } + } +} + +/** + * + * @param {string} numStr without leading zeros + * @returns + */ +function trimZeros(numStr){ + if(numStr && numStr.indexOf(".") !== -1){//float + numStr = numStr.replace(/0+$/, ""); //remove ending zeros + if(numStr === ".") numStr = "0"; + else if(numStr[0] === ".") numStr = "0"+numStr; + else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); + return numStr; + } + return numStr; +} +module.exports = toNumber + + +/***/ }), + +/***/ 1288: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +/*! + * Tmp + * + * Copyright (c) 2011-2017 KARASZI Istvan + * + * MIT Licensed + */ + +/* + * Module dependencies. + */ +const fs = __nccwpck_require__(9896); +const os = __nccwpck_require__(857); +const path = __nccwpck_require__(6928); +const crypto = __nccwpck_require__(6982); +const _c = { fs: fs.constants, os: os.constants }; + +/* + * The working inner variables. 
+ */ +const + // the random characters to choose from + RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', + + TEMPLATE_PATTERN = /XXXXXX/, + + DEFAULT_TRIES = 3, + + CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR), + + // constants are off on the windows platform and will not match the actual errno codes + IS_WIN32 = os.platform() === 'win32', + EBADF = _c.EBADF || _c.os.errno.EBADF, + ENOENT = _c.ENOENT || _c.os.errno.ENOENT, + + DIR_MODE = 0o700 /* 448 */, + FILE_MODE = 0o600 /* 384 */, + + EXIT = 'exit', + + // this will hold the objects need to be removed on exit + _removeObjects = [], + + // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback + FN_RMDIR_SYNC = fs.rmdirSync.bind(fs); + +let + _gracefulCleanup = false; + +/** + * Recursively remove a directory and its contents. + * + * @param {string} dirPath path of directory to remove + * @param {Function} callback + * @private + */ +function rimraf(dirPath, callback) { + return fs.rm(dirPath, { recursive: true }, callback); +} + +/** + * Recursively remove a directory and its contents, synchronously. + * + * @param {string} dirPath path of directory to remove + * @private + */ +function FN_RIMRAF_SYNC(dirPath) { + return fs.rmSync(dirPath, { recursive: true }); +} + +/** + * Gets a temporary file name. + * + * @param {(Options|tmpNameCallback)} options options or callback + * @param {?tmpNameCallback} callback the callback function + */ +function tmpName(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + try { + _assertAndSanitizeOptions(opts); + } catch (err) { + return cb(err); + } + + let tries = opts.tries; + (function _getUniqueName() { + try { + const name = _generateTmpName(opts); + + // check whether the path exists then retry if needed + fs.stat(name, function (err) { + /* istanbul ignore else */ + if (!err) { + /* istanbul ignore else */ + if (tries-- > 0) return _getUniqueName(); + + return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name)); + } + + cb(null, name); + }); + } catch (err) { + cb(err); + } + }()); +} + +/** + * Synchronous version of tmpName. + * + * @param {Object} options + * @returns {string} the generated random name + * @throws {Error} if the options are invalid or could not generate a filename + */ +function tmpNameSync(options) { + const + args = _parseArguments(options), + opts = args[0]; + + _assertAndSanitizeOptions(opts); + + let tries = opts.tries; + do { + const name = _generateTmpName(opts); + try { + fs.statSync(name); + } catch (e) { + return name; + } + } while (tries-- > 0); + + throw new Error('Could not get a unique tmp filename, max tries reached'); +} + +/** + * Creates and opens a temporary file. 
+ * + * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined + * @param {?fileCallback} callback + */ +function file(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + // gets a temporary filename + tmpName(opts, function _tmpNameCreated(err, name) { + /* istanbul ignore else */ + if (err) return cb(err); + + // create and open the file + fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) { + /* istanbu ignore else */ + if (err) return cb(err); + + if (opts.discardDescriptor) { + return fs.close(fd, function _discardCallback(possibleErr) { + // the chance of getting an error on close here is rather low and might occur in the most edgiest cases only + return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false)); + }); + } else { + // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care + // about the descriptor + const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; + cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false)); + } + }); + }); +} + +/** + * Synchronous version of file. + * + * @param {Options} options + * @returns {FileSyncObject} object consists of name, fd and removeCallback + * @throws {Error} if cannot create a file + */ +function fileSync(options) { + const + args = _parseArguments(options), + opts = args[0]; + + const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; + const name = tmpNameSync(opts); + var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); + /* istanbul ignore else */ + if (opts.discardDescriptor) { + fs.closeSync(fd); + fd = undefined; + } + + return { + name: name, + fd: fd, + removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true) + }; +} + +/** + * Creates a temporary directory. + * + * @param {(Options|dirCallback)} options the options or the callback function + * @param {?dirCallback} callback + */ +function dir(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + // gets a temporary filename + tmpName(opts, function _tmpNameCreated(err, name) { + /* istanbul ignore else */ + if (err) return cb(err); + + // create the directory + fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) { + /* istanbul ignore else */ + if (err) return cb(err); + + cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); + }); + }); +} + +/** + * Synchronous version of dir. + * + * @param {Options} options + * @returns {DirSyncObject} object consists of name and removeCallback + * @throws {Error} if it cannot create a directory + */ +function dirSync(options) { + const + args = _parseArguments(options), + opts = args[0]; + + const name = tmpNameSync(opts); + fs.mkdirSync(name, opts.mode || DIR_MODE); + + return { + name: name, + removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) + }; +} + +/** + * Removes files asynchronously. 
+ * + * @param {Object} fdPath + * @param {Function} next + * @private + */ +function _removeFileAsync(fdPath, next) { + const _handler = function (err) { + if (err && !_isENOENT(err)) { + // reraise any unanticipated error + return next(err); + } + next(); + }; + + if (0 <= fdPath[0]) + fs.close(fdPath[0], function () { + fs.unlink(fdPath[1], _handler); + }); + else fs.unlink(fdPath[1], _handler); +} + +/** + * Removes files synchronously. + * + * @param {Object} fdPath + * @private + */ +function _removeFileSync(fdPath) { + let rethrownException = null; + try { + if (0 <= fdPath[0]) fs.closeSync(fdPath[0]); + } catch (e) { + // reraise any unanticipated error + if (!_isEBADF(e) && !_isENOENT(e)) throw e; + } finally { + try { + fs.unlinkSync(fdPath[1]); + } + catch (e) { + // reraise any unanticipated error + if (!_isENOENT(e)) rethrownException = e; + } + } + if (rethrownException !== null) { + throw rethrownException; + } +} + +/** + * Prepares the callback for removal of the temporary file. + * + * Returns either a sync callback or a async callback depending on whether + * fileSync or file was called, which is expressed by the sync parameter. + * + * @param {string} name the path of the file + * @param {number} fd file descriptor + * @param {Object} opts + * @param {boolean} sync + * @returns {fileCallback | fileCallbackSync} + * @private + */ +function _prepareTmpFileRemoveCallback(name, fd, opts, sync) { + const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync); + const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync); + + if (!opts.keep) _removeObjects.unshift(removeCallbackSync); + + return sync ? removeCallbackSync : removeCallback; +} + +/** + * Prepares the callback for removal of the temporary directory. + * + * Returns either a sync callback or a async callback depending on whether + * tmpFileSync or tmpFile was called, which is expressed by the sync parameter. + * + * @param {string} name + * @param {Object} opts + * @param {boolean} sync + * @returns {Function} the callback + * @private + */ +function _prepareTmpDirRemoveCallback(name, opts, sync) { + const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs); + const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC; + const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); + const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); + if (!opts.keep) _removeObjects.unshift(removeCallbackSync); + + return sync ? removeCallbackSync : removeCallback; +} + +/** + * Creates a guarded function wrapping the removeFunction call. + * + * The cleanup callback is save to be called multiple times. + * Subsequent invocations will be ignored. 
+ * + * @param {Function} removeFunction + * @param {string} fileOrDirName + * @param {boolean} sync + * @param {cleanupCallbackSync?} cleanupCallbackSync + * @returns {cleanupCallback | cleanupCallbackSync} + * @private + */ +function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) { + let called = false; + + // if sync is true, the next parameter will be ignored + return function _cleanupCallback(next) { + + /* istanbul ignore else */ + if (!called) { + // remove cleanupCallback from cache + const toRemove = cleanupCallbackSync || _cleanupCallback; + const index = _removeObjects.indexOf(toRemove); + /* istanbul ignore else */ + if (index >= 0) _removeObjects.splice(index, 1); + + called = true; + if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) { + return removeFunction(fileOrDirName); + } else { + return removeFunction(fileOrDirName, next || function() {}); + } + } + }; +} + +/** + * The garbage collector. + * + * @private + */ +function _garbageCollector() { + /* istanbul ignore else */ + if (!_gracefulCleanup) return; + + // the function being called removes itself from _removeObjects, + // loop until _removeObjects is empty + while (_removeObjects.length) { + try { + _removeObjects[0](); + } catch (e) { + // already removed? + } + } +} + +/** + * Random name generator based on crypto. + * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript + * + * @param {number} howMany + * @returns {string} the generated random name + * @private + */ +function _randomChars(howMany) { + let + value = [], + rnd = null; + + // make sure that we do not fail because we ran out of entropy + try { + rnd = crypto.randomBytes(howMany); + } catch (e) { + rnd = crypto.pseudoRandomBytes(howMany); + } + + for (var i = 0; i < howMany; i++) { + value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); + } + + return value.join(''); +} + +/** + * Helper which determines whether a string s is blank, that is undefined, or empty or null. + * + * @private + * @param {string} s + * @returns {Boolean} true whether the string s is blank, false otherwise + */ +function _isBlank(s) { + return s === null || _isUndefined(s) || !s.trim(); +} + +/** + * Checks whether the `obj` parameter is defined or not. + * + * @param {Object} obj + * @returns {boolean} true if the object is undefined + * @private + */ +function _isUndefined(obj) { + return typeof obj === 'undefined'; +} + +/** + * Parses the function arguments. + * + * This function helps to have optional arguments. + * + * @param {(Options|null|undefined|Function)} options + * @param {?Function} callback + * @returns {Array} parsed arguments + * @private + */ +function _parseArguments(options, callback) { + /* istanbul ignore else */ + if (typeof options === 'function') { + return [{}, options]; + } + + /* istanbul ignore else */ + if (_isUndefined(options)) { + return [{}, callback]; + } + + // copy options so we do not leak the changes we make internally + const actualOptions = {}; + for (const key of Object.getOwnPropertyNames(options)) { + actualOptions[key] = options[key]; + } + + return [actualOptions, callback]; +} + +/** + * Generates a new temporary name. 
+ * + * @param {Object} opts + * @returns {string} the new random name according to opts + * @private + */ +function _generateTmpName(opts) { + + const tmpDir = opts.tmpdir; + + /* istanbul ignore else */ + if (!_isUndefined(opts.name)) + return path.join(tmpDir, opts.dir, opts.name); + + /* istanbul ignore else */ + if (!_isUndefined(opts.template)) + return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); + + // prefix and postfix + const name = [ + opts.prefix ? opts.prefix : 'tmp', + '-', + process.pid, + '-', + _randomChars(12), + opts.postfix ? '-' + opts.postfix : '' + ].join(''); + + return path.join(tmpDir, opts.dir, name); +} + +/** + * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing + * options. + * + * @param {Options} options + * @private + */ +function _assertAndSanitizeOptions(options) { + + options.tmpdir = _getTmpDir(options); + + const tmpDir = options.tmpdir; + + /* istanbul ignore else */ + if (!_isUndefined(options.name)) + _assertIsRelative(options.name, 'name', tmpDir); + /* istanbul ignore else */ + if (!_isUndefined(options.dir)) + _assertIsRelative(options.dir, 'dir', tmpDir); + /* istanbul ignore else */ + if (!_isUndefined(options.template)) { + _assertIsRelative(options.template, 'template', tmpDir); + if (!options.template.match(TEMPLATE_PATTERN)) + throw new Error(`Invalid template, found "${options.template}".`); + } + /* istanbul ignore else */ + if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0) + throw new Error(`Invalid tries, found "${options.tries}".`); + + // if a name was specified we will try once + options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1; + options.keep = !!options.keep; + options.detachDescriptor = !!options.detachDescriptor; + options.discardDescriptor = !!options.discardDescriptor; + options.unsafeCleanup = !!options.unsafeCleanup; + + // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to + options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir)); + options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir)); + // sanitize further if template is relative to options.dir + options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template); + + // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to + options.name = _isUndefined(options.name) ? undefined : options.name; + options.prefix = _isUndefined(options.prefix) ? '' : options.prefix; + options.postfix = _isUndefined(options.postfix) ? '' : options.postfix; +} + +/** + * Resolve the specified path name in respect to tmpDir. + * + * The specified name might include relative path components, e.g. ../ + * so we need to resolve in order to be sure that is is located inside tmpDir + * + * @param name + * @param tmpDir + * @returns {string} + * @private + */ +function _resolvePath(name, tmpDir) { + if (name.startsWith(tmpDir)) { + return path.resolve(name); + } else { + return path.resolve(path.join(tmpDir, name)); + } +} + +/** + * Asserts whether specified name is relative to the specified tmpDir. 
+ * + * @param {string} name + * @param {string} option + * @param {string} tmpDir + * @throws {Error} + * @private + */ +function _assertIsRelative(name, option, tmpDir) { + if (option === 'name') { + // assert that name is not absolute and does not contain a path + if (path.isAbsolute(name)) + throw new Error(`${option} option must not contain an absolute path, found "${name}".`); + // must not fail on valid . or .. or similar such constructs + let basename = path.basename(name); + if (basename === '..' || basename === '.' || basename !== name) + throw new Error(`${option} option must not contain a path, found "${name}".`); + } + else { // if (option === 'dir' || option === 'template') { + // assert that dir or template are relative to tmpDir + if (path.isAbsolute(name) && !name.startsWith(tmpDir)) { + throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`); + } + let resolvedPath = _resolvePath(name, tmpDir); + if (!resolvedPath.startsWith(tmpDir)) + throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`); + } +} + +/** + * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. + * + * @private + */ +function _isEBADF(error) { + return _isExpectedError(error, -EBADF, 'EBADF'); +} + +/** + * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. + * + * @private + */ +function _isENOENT(error) { + return _isExpectedError(error, -ENOENT, 'ENOENT'); +} + +/** + * Helper to determine whether the expected error code matches the actual code and errno, + * which will differ between the supported node versions. + * + * - Node >= 7.0: + * error.code {string} + * error.errno {number} any numerical value will be negated + * + * CAVEAT + * + * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT + * is no different here. + * + * @param {SystemError} error + * @param {number} errno + * @param {string} code + * @private + */ +function _isExpectedError(error, errno, code) { + return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno; +} + +/** + * Sets the graceful cleanup. + * + * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the + * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary + * object removals. + */ +function setGracefulCleanup() { + _gracefulCleanup = true; +} + +/** + * Returns the currently configured tmp dir from os.tmpdir(). + * + * @private + * @param {?Options} options + * @returns {string} the currently configured tmp dir + */ +function _getTmpDir(options) { + return path.resolve(options && options.tmpdir || os.tmpdir()); +} + +// Install process exit listener +process.addListener(EXIT, _garbageCollector); + +/** + * Configuration options. 
+ * + * @typedef {Object} Options + * @property {?boolean} keep the temporary object (file or dir) will not be garbage collected + * @property {?number} tries the number of tries before give up the name generation + * @property (?int) mode the access mode, defaults are 0o700 for directories and 0o600 for files + * @property {?string} template the "mkstemp" like filename template + * @property {?string} name fixed name relative to tmpdir or the specified dir option + * @property {?string} dir tmp directory relative to the root tmp directory in use + * @property {?string} prefix prefix for the generated name + * @property {?string} postfix postfix for the generated name + * @property {?string} tmpdir the root tmp directory which overrides the os tmpdir + * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty + * @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection + * @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection + */ + +/** + * @typedef {Object} FileSyncObject + * @property {string} name the name of the file + * @property {string} fd the file descriptor or -1 if the fd has been discarded + * @property {fileCallback} removeCallback the callback function to remove the file + */ + +/** + * @typedef {Object} DirSyncObject + * @property {string} name the name of the directory + * @property {fileCallback} removeCallback the callback function to remove the directory + */ + +/** + * @callback tmpNameCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + */ + +/** + * @callback fileCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {number} fd the file descriptor or -1 if the fd had been discarded + * @param {cleanupCallback} fn the cleanup callback function + */ + +/** + * @callback fileCallbackSync + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {number} fd the file descriptor or -1 if the fd had been discarded + * @param {cleanupCallbackSync} fn the cleanup callback function + */ + +/** + * @callback dirCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {cleanupCallback} fn the cleanup callback function + */ + +/** + * @callback dirCallbackSync + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {cleanupCallbackSync} fn the cleanup callback function + */ + +/** + * Removes the temporary created file or directory. + * + * @callback cleanupCallback + * @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed + */ + +/** + * Removes the temporary created file or directory. + * + * @callback cleanupCallbackSync + */ + +/** + * Callback function for function composition. 
+ * @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57} + * + * @callback simpleCallback + */ + +// exporting all the needed methods + +// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will +// allow users to reconfigure the temporary directory +Object.defineProperty(module.exports, "tmpdir", ({ + enumerable: true, + configurable: false, + get: function () { + return _getTmpDir(); + } +})); + +module.exports.dir = dir; +module.exports.dirSync = dirSync; + +module.exports.file = file; +module.exports.fileSync = fileSync; + +module.exports.tmpName = tmpName; +module.exports.tmpNameSync = tmpNameSync; + +module.exports.setGracefulCleanup = setGracefulCleanup; + + +/***/ }), + +/***/ 1860: +/***/ ((module) => { + +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, Symbol, Reflect, Promise, SuppressedError, Iterator */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __esDecorate; +var __runInitializers; +var __propKey; +var __setFunctionName; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +var __addDisposableResource; +var __disposeResources; +var __rewriteRelativeImportExtension; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context = {}; + for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context.access[p] = contextIn.access[p]; + context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; + var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } + else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; + }; + + __runInitializers = function (thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; + }; + + __propKey = function (x) { + return typeof x === "symbol" ? x : "".concat(x); + }; + + __setFunctionName = function (f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? 
AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; + function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } + function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + __addDisposableResource = function (env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; + } + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; + } + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; + env.stack.push({ value: value, dispose: dispose, async: async }); + } + else if (async) { + env.stack.push({ async: true }); + } + return value; + }; + + var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; + }; + + __disposeResources = function (env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); + } + else s |= 1; + } + catch (e) { + fail(e); + } + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); + }; + + __rewriteRelativeImportExtension = function (path, preserveJsx) { + if (typeof path === "string" && /^\.\.?\//.test(path)) { + return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); + }); + } + return path; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__esDecorate", __esDecorate); + exporter("__runInitializers", __runInitializers); + exporter("__propKey", __propKey); + exporter("__setFunctionName", __setFunctionName); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); + exporter("__addDisposableResource", __addDisposableResource); + exporter("__disposeResources", __disposeResources); + exporter("__rewriteRelativeImportExtension", __rewriteRelativeImportExtension); +}); + +0 && (0); + + +/***/ }), + +/***/ 770: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(218); + + +/***/ }), + +/***/ 218: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var net = __nccwpck_require__(9278); +var tls = __nccwpck_require__(4756); +var http = __nccwpck_require__(8611); +var https = __nccwpck_require__(5692); +var events = __nccwpck_require__(4434); +var assert = __nccwpck_require__(2613); +var util = __nccwpck_require__(9023); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. 
+ self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. + self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. 
+ process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } + + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; +} + + +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test + + +/***/ }), + +/***/ 6752: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Client = __nccwpck_require__(6197) +const Dispatcher = __nccwpck_require__(992) +const errors = __nccwpck_require__(8707) +const Pool = __nccwpck_require__(5076) +const BalancedPool = __nccwpck_require__(1093) +const Agent = __nccwpck_require__(9965) +const util = __nccwpck_require__(3440) +const { InvalidArgumentError } = errors +const api = __nccwpck_require__(6615) +const buildConnector = __nccwpck_require__(9136) +const MockClient = __nccwpck_require__(7365) +const MockAgent = __nccwpck_require__(7501) +const MockPool = __nccwpck_require__(4004) +const mockErrors = __nccwpck_require__(2429) +const ProxyAgent = __nccwpck_require__(2720) +const RetryHandler = __nccwpck_require__(3573) +const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(2581) +const DecoratorHandler = __nccwpck_require__(8840) +const RedirectHandler = __nccwpck_require__(8299) +const createRedirectInterceptor = __nccwpck_require__(4415) + +let hasCrypto +try { + __nccwpck_require__(6982) + hasCrypto = true +} catch { + hasCrypto = false +} + +Object.assign(Dispatcher.prototype, api) + +module.exports.Dispatcher = Dispatcher +module.exports.Client = Client +module.exports.Pool = Pool +module.exports.BalancedPool = BalancedPool +module.exports.Agent = Agent +module.exports.ProxyAgent = ProxyAgent +module.exports.RetryHandler = RetryHandler + +module.exports.DecoratorHandler = DecoratorHandler +module.exports.RedirectHandler = RedirectHandler +module.exports.createRedirectInterceptor = createRedirectInterceptor + +module.exports.buildConnector = buildConnector +module.exports.errors = errors + +function makeDispatcher (fn) { + return (url, opts, handler) => { + if (typeof opts === 'function') { + handler = opts + opts = null + } + + if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) { + throw new InvalidArgumentError('invalid url') + } + + if (opts != null && typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (opts && opts.path != null) { + if (typeof opts.path !== 'string') { + throw new InvalidArgumentError('invalid opts.path') + } + + let path = opts.path + if (!opts.path.startsWith('/')) { + path = 
`/${path}` + } + + url = new URL(util.parseOrigin(url).origin + path) + } else { + if (!opts) { + opts = typeof url === 'object' ? url : {} + } + + url = util.parseURL(url) + } + + const { agent, dispatcher = getGlobalDispatcher() } = opts + + if (agent) { + throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?') + } + + return fn.call(dispatcher, { + ...opts, + origin: url.origin, + path: url.search ? `${url.pathname}${url.search}` : url.pathname, + method: opts.method || (opts.body ? 'PUT' : 'GET') + }, handler) + } +} + +module.exports.setGlobalDispatcher = setGlobalDispatcher +module.exports.getGlobalDispatcher = getGlobalDispatcher + +if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) { + let fetchImpl = null + module.exports.fetch = async function fetch (resource) { + if (!fetchImpl) { + fetchImpl = (__nccwpck_require__(2315).fetch) + } + + try { + return await fetchImpl(...arguments) + } catch (err) { + if (typeof err === 'object') { + Error.captureStackTrace(err, this) + } + + throw err + } + } + module.exports.Headers = __nccwpck_require__(6349).Headers + module.exports.Response = __nccwpck_require__(8676).Response + module.exports.Request = __nccwpck_require__(5194).Request + module.exports.FormData = __nccwpck_require__(3073).FormData + module.exports.File = __nccwpck_require__(3041).File + module.exports.FileReader = __nccwpck_require__(2160).FileReader + + const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(5628) + + module.exports.setGlobalOrigin = setGlobalOrigin + module.exports.getGlobalOrigin = getGlobalOrigin + + const { CacheStorage } = __nccwpck_require__(4738) + const { kConstruct } = __nccwpck_require__(296) + + // Cache & CacheStorage are tightly coupled with fetch. Even if it may run + // in an older version of Node, it doesn't have any use without fetch. 
+ module.exports.caches = new CacheStorage(kConstruct) +} + +if (util.nodeMajor >= 16) { + const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(3168) + + module.exports.deleteCookie = deleteCookie + module.exports.getCookies = getCookies + module.exports.getSetCookies = getSetCookies + module.exports.setCookie = setCookie + + const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(4322) + + module.exports.parseMIMEType = parseMIMEType + module.exports.serializeAMimeType = serializeAMimeType +} + +if (util.nodeMajor >= 18 && hasCrypto) { + const { WebSocket } = __nccwpck_require__(5171) + + module.exports.WebSocket = WebSocket +} + +module.exports.request = makeDispatcher(api.request) +module.exports.stream = makeDispatcher(api.stream) +module.exports.pipeline = makeDispatcher(api.pipeline) +module.exports.connect = makeDispatcher(api.connect) +module.exports.upgrade = makeDispatcher(api.upgrade) + +module.exports.MockClient = MockClient +module.exports.MockPool = MockPool +module.exports.MockAgent = MockAgent +module.exports.mockErrors = mockErrors + + +/***/ }), + +/***/ 9965: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { InvalidArgumentError } = __nccwpck_require__(8707) +const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(6443) +const DispatcherBase = __nccwpck_require__(1) +const Pool = __nccwpck_require__(5076) +const Client = __nccwpck_require__(6197) +const util = __nccwpck_require__(3440) +const createRedirectInterceptor = __nccwpck_require__(4415) +const { WeakRef, FinalizationRegistry } = __nccwpck_require__(3194)() + +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kMaxRedirections = Symbol('maxRedirections') +const kOnDrain = Symbol('onDrain') +const kFactory = Symbol('factory') +const kFinalizer = Symbol('finalizer') +const kOptions = Symbol('options') + +function defaultFactory (origin, opts) { + return opts && opts.connections === 1 + ? new Client(origin, opts) + : new Pool(origin, opts) +} + +class Agent extends DispatcherBase { + constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { + super() + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (!Number.isInteger(maxRedirections) || maxRedirections < 0) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } + + if (connect && typeof connect !== 'function') { + connect = { ...connect } + } + + this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent) + ? options.interceptors.Agent + : [createRedirectInterceptor({ maxRedirections })] + + this[kOptions] = { ...util.deepClone(options), connect } + this[kOptions].interceptors = options.interceptors + ? 
{ ...options.interceptors } + : undefined + this[kMaxRedirections] = maxRedirections + this[kFactory] = factory + this[kClients] = new Map() + this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => { + const ref = this[kClients].get(key) + if (ref !== undefined && ref.deref() === undefined) { + this[kClients].delete(key) + } + }) + + const agent = this + + this[kOnDrain] = (origin, targets) => { + agent.emit('drain', origin, [agent, ...targets]) + } + + this[kOnConnect] = (origin, targets) => { + agent.emit('connect', origin, [agent, ...targets]) + } + + this[kOnDisconnect] = (origin, targets, err) => { + agent.emit('disconnect', origin, [agent, ...targets], err) + } + + this[kOnConnectionError] = (origin, targets, err) => { + agent.emit('connectionError', origin, [agent, ...targets], err) + } + } + + get [kRunning] () { + let ret = 0 + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore next: gc is undeterministic */ + if (client) { + ret += client[kRunning] + } + } + return ret + } + + [kDispatch] (opts, handler) { + let key + if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) { + key = String(opts.origin) + } else { + throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.') + } + + const ref = this[kClients].get(key) + + let dispatcher = ref ? ref.deref() : null + if (!dispatcher) { + dispatcher = this[kFactory](opts.origin, this[kOptions]) + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].set(key, new WeakRef(dispatcher)) + this[kFinalizer].register(dispatcher, key) + } + + return dispatcher.dispatch(opts, handler) + } + + async [kClose] () { + const closePromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + closePromises.push(client.close()) + } + } + + await Promise.all(closePromises) + } + + async [kDestroy] (err) { + const destroyPromises = [] + for (const ref of this[kClients].values()) { + const client = ref.deref() + /* istanbul ignore else: gc is undeterministic */ + if (client) { + destroyPromises.push(client.destroy(err)) + } + } + + await Promise.all(destroyPromises) + } +} + +module.exports = Agent + + +/***/ }), + +/***/ 158: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { addAbortListener } = __nccwpck_require__(3440) +const { RequestAbortedError } = __nccwpck_require__(8707) + +const kListener = Symbol('kListener') +const kSignal = Symbol('kSignal') + +function abort (self) { + if (self.abort) { + self.abort() + } else { + self.onError(new RequestAbortedError()) + } +} + +function addSignal (self, signal) { + self[kSignal] = null + self[kListener] = null + + if (!signal) { + return + } + + if (signal.aborted) { + abort(self) + return + } + + self[kSignal] = signal + self[kListener] = () => { + abort(self) + } + + addAbortListener(self[kSignal], self[kListener]) +} + +function removeSignal (self) { + if (!self[kSignal]) { + return + } + + if ('removeEventListener' in self[kSignal]) { + self[kSignal].removeEventListener('abort', self[kListener]) + } else { + self[kSignal].removeListener('abort', self[kListener]) + } + + self[kSignal] = null + self[kListener] = null +} + +module.exports = { + addSignal, + removeSignal +} + + +/***/ }), + +/***/ 4660: +/***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { AsyncResource } = __nccwpck_require__(290) +const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8707) +const util = __nccwpck_require__(3440) +const { addSignal, removeSignal } = __nccwpck_require__(158) + +class ConnectHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + const { signal, opaque, responseHeaders } = opts + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + super('UNDICI_CONNECT') + + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.callback = callback + this.abort = null + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders () { + throw new SocketError('bad connect', null) + } + + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this + + removeSignal(this) + + this.callback = null + + let headers = rawHeaders + // Indicates is an HTTP2Session + if (headers != null) { + headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + } + + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + socket, + opaque, + context + }) + } + + onError (err) { + const { callback, opaque } = this + + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + } +} + +function connect (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + connect.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + const connectHandler = new ConnectHandler(opts, callback) + this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = connect + + +/***/ }), + +/***/ 6862: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + Readable, + Duplex, + PassThrough +} = __nccwpck_require__(2203) +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = __nccwpck_require__(8707) +const util = __nccwpck_require__(3440) +const { AsyncResource } = __nccwpck_require__(290) +const { addSignal, removeSignal } = __nccwpck_require__(158) +const assert = __nccwpck_require__(2613) + +const kResume = Symbol('resume') + +class PipelineRequest extends Readable { + constructor () { + super({ autoDestroy: true }) + + this[kResume] = null + } + + _read () { + const { [kResume]: resume } = this + + if (resume) { + this[kResume] = null + resume() + } + } + + _destroy (err, callback) { + this._read() + + callback(err) + } +} + +class PipelineResponse extends Readable { + constructor (resume) { + super({ autoDestroy: true }) + this[kResume] = resume + } + + _read () { + this[kResume]() + } + + _destroy (err, callback) { + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() + } + + callback(err) + } +} + +class PipelineHandler extends AsyncResource { + constructor (opts, handler) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (typeof handler !== 'function') { + throw new InvalidArgumentError('invalid handler') + } + + const { signal, method, opaque, onInfo, responseHeaders } = opts + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_PIPELINE') + + this.opaque = opaque || null + this.responseHeaders = responseHeaders || null + this.handler = handler + this.abort = null + this.context = null + this.onInfo = onInfo || null + + this.req = new PipelineRequest().on('error', util.nop) + + this.ret = new Duplex({ + readableObjectMode: opts.objectMode, + autoDestroy: true, + read: () => { + const { body } = this + + if (body && body.resume) { + body.resume() + } + }, + write: (chunk, encoding, callback) => { + const { req } = this + + if (req.push(chunk, encoding) || req._readableState.destroyed) { + callback() + } else { + req[kResume] = callback + } + }, + destroy: (err, callback) => { + const { body, req, res, ret, abort } = this + + if (!err && !ret._readableState.endEmitted) { + err = new RequestAbortedError() + } + + if (abort && err) { + abort() + } + + util.destroy(body, err) + util.destroy(req, err) + util.destroy(res, err) + + removeSignal(this) + + callback(err) + } + }).on('prefinish', () => { + const { req } = this + + // Node < 15 does not call _final in same tick. 
+ req.push(null) + }) + + this.res = null + + addSignal(this, signal) + } + + onConnect (abort, context) { + const { ret, res } = this + + assert(!res, 'pipeline cannot be retried') + + if (ret.destroyed) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders (statusCode, rawHeaders, resume) { + const { opaque, handler, context } = this + + if (statusCode < 200) { + if (this.onInfo) { + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.onInfo({ statusCode, headers }) + } + return + } + + this.res = new PipelineResponse(resume) + + let body + try { + this.handler = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + body = this.runInAsyncScope(handler, null, { + statusCode, + headers, + opaque, + body: this.res, + context + }) + } catch (err) { + this.res.on('error', util.nop) + throw err + } + + if (!body || typeof body.on !== 'function') { + throw new InvalidReturnValueError('expected Readable') + } + + body + .on('data', (chunk) => { + const { ret, body } = this + + if (!ret.push(chunk) && body.pause) { + body.pause() + } + }) + .on('error', (err) => { + const { ret } = this + + util.destroy(ret, err) + }) + .on('end', () => { + const { ret } = this + + ret.push(null) + }) + .on('close', () => { + const { ret } = this + + if (!ret._readableState.ended) { + util.destroy(ret, new RequestAbortedError()) + } + }) + + this.body = body + } + + onData (chunk) { + const { res } = this + return res.push(chunk) + } + + onComplete (trailers) { + const { res } = this + res.push(null) + } + + onError (err) { + const { ret } = this + this.handler = null + util.destroy(ret, err) + } +} + +function pipeline (opts, handler) { + try { + const pipelineHandler = new PipelineHandler(opts, handler) + this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler) + return pipelineHandler.ret + } catch (err) { + return new PassThrough().destroy(err) + } +} + +module.exports = pipeline + + +/***/ }), + +/***/ 4043: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Readable = __nccwpck_require__(9927) +const { + InvalidArgumentError, + RequestAbortedError +} = __nccwpck_require__(8707) +const util = __nccwpck_require__(3440) +const { getResolveErrorBodyCallback } = __nccwpck_require__(7655) +const { AsyncResource } = __nccwpck_require__(290) +const { addSignal, removeSignal } = __nccwpck_require__(158) + +class RequestHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts + + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) { + throw new InvalidArgumentError('invalid highWaterMark') + } + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_REQUEST') + } catch (err) { + if 
(util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.res = null + this.abort = null + this.body = body + this.trailers = {} + this.context = null + this.onInfo = onInfo || null + this.throwOnError = throwOnError + this.highWaterMark = highWaterMark + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) + } + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this + + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } + + const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + const body = new Readable({ resume, abort, contentType, highWaterMark }) + + this.callback = null + this.res = body + if (callback !== null) { + if (this.throwOnError && statusCode >= 400) { + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body, contentType, statusCode, statusMessage, headers } + ) + } else { + this.runInAsyncScope(callback, null, null, { + statusCode, + headers, + trailers: this.trailers, + opaque, + body, + context + }) + } + } + } + + onData (chunk) { + const { res } = this + return res.push(chunk) + } + + onComplete (trailers) { + const { res } = this + + removeSignal(this) + + util.parseHeaders(trailers, this.trailers) + + res.push(null) + } + + onError (err) { + const { res, callback, body, opaque } = this + + removeSignal(this) + + if (callback) { + // TODO: Does this need queueMicrotask? + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + + if (res) { + this.res = null + // Ensure all queued handlers are invoked before destroying res. + queueMicrotask(() => { + util.destroy(res, err) + }) + } + + if (body) { + this.body = null + util.destroy(body, err) + } + } +} + +function request (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + request.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + this.dispatch(opts, new RequestHandler(opts, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = request +module.exports.RequestHandler = RequestHandler + + +/***/ }), + +/***/ 3560: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { finished, PassThrough } = __nccwpck_require__(2203) +const { + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError +} = __nccwpck_require__(8707) +const util = __nccwpck_require__(3440) +const { getResolveErrorBodyCallback } = __nccwpck_require__(7655) +const { AsyncResource } = __nccwpck_require__(290) +const { addSignal, removeSignal } = __nccwpck_require__(158) + +class StreamHandler extends AsyncResource { + constructor (opts, factory, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts + + try { + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('invalid factory') + } + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + if (method === 'CONNECT') { + throw new InvalidArgumentError('invalid method') + } + + if (onInfo && typeof onInfo !== 'function') { + throw new InvalidArgumentError('invalid onInfo callback') + } + + super('UNDICI_STREAM') + } catch (err) { + if (util.isStream(body)) { + util.destroy(body.on('error', util.nop), err) + } + throw err + } + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.factory = factory + this.callback = callback + this.res = null + this.abort = null + this.context = null + this.trailers = null + this.body = body + this.onInfo = onInfo || null + this.throwOnError = throwOnError || false + + if (util.isStream(body)) { + body.on('error', (err) => { + this.onError(err) + }) + } + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = context + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const { factory, opaque, context, callback, responseHeaders } = this + + const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + + if (statusCode < 200) { + if (this.onInfo) { + this.onInfo({ statusCode, headers }) + } + return + } + + this.factory = null + + let res + + if (this.throwOnError && statusCode >= 400) { + const parsedHeaders = responseHeaders === 'raw' ? 
util.parseHeaders(rawHeaders) : headers + const contentType = parsedHeaders['content-type'] + res = new PassThrough() + + this.callback = null + this.runInAsyncScope(getResolveErrorBodyCallback, null, + { callback, body: res, contentType, statusCode, statusMessage, headers } + ) + } else { + if (factory === null) { + return + } + + res = this.runInAsyncScope(factory, null, { + statusCode, + headers, + opaque, + context + }) + + if ( + !res || + typeof res.write !== 'function' || + typeof res.end !== 'function' || + typeof res.on !== 'function' + ) { + throw new InvalidReturnValueError('expected Writable') + } + + // TODO: Avoid finished. It registers an unnecessary amount of listeners. + finished(res, { readable: false }, (err) => { + const { callback, res, opaque, trailers, abort } = this + + this.res = null + if (err || !res.readable) { + util.destroy(res, err) + } + + this.callback = null + this.runInAsyncScope(callback, null, err || null, { opaque, trailers }) + + if (err) { + abort() + } + }) + } + + res.on('drain', resume) + + this.res = res + + const needDrain = res.writableNeedDrain !== undefined + ? res.writableNeedDrain + : res._writableState && res._writableState.needDrain + + return needDrain !== true + } + + onData (chunk) { + const { res } = this + + return res ? res.write(chunk) : true + } + + onComplete (trailers) { + const { res } = this + + removeSignal(this) + + if (!res) { + return + } + + this.trailers = util.parseHeaders(trailers) + + res.end() + } + + onError (err) { + const { res, callback, opaque, body } = this + + removeSignal(this) + + this.factory = null + + if (res) { + this.res = null + util.destroy(res, err) + } else if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + + if (body) { + this.body = null + util.destroy(body, err) + } + } +} + +function stream (opts, factory, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + stream.call(this, opts, factory, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + this.dispatch(opts, new StreamHandler(opts, factory, callback)) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = stream + + +/***/ }), + +/***/ 1882: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(8707) +const { AsyncResource } = __nccwpck_require__(290) +const util = __nccwpck_require__(3440) +const { addSignal, removeSignal } = __nccwpck_require__(158) +const assert = __nccwpck_require__(2613) + +class UpgradeHandler extends AsyncResource { + constructor (opts, callback) { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('invalid opts') + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + const { signal, opaque, responseHeaders } = opts + + if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { + throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') + } + + super('UNDICI_UPGRADE') + + this.responseHeaders = responseHeaders || null + this.opaque = opaque || null + this.callback = callback + this.abort = null + this.context = null + + addSignal(this, signal) + } + + onConnect (abort, context) { + if (!this.callback) { + throw new RequestAbortedError() + } + + this.abort = abort + this.context = null + } + + onHeaders () { + throw new SocketError('bad upgrade', null) + } + + onUpgrade (statusCode, rawHeaders, socket) { + const { callback, opaque, context } = this + + assert.strictEqual(statusCode, 101) + + removeSignal(this) + + this.callback = null + const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) + this.runInAsyncScope(callback, null, null, { + headers, + socket, + opaque, + context + }) + } + + onError (err) { + const { callback, opaque } = this + + removeSignal(this) + + if (callback) { + this.callback = null + queueMicrotask(() => { + this.runInAsyncScope(callback, null, err, { opaque }) + }) + } + } +} + +function upgrade (opts, callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + upgrade.call(this, opts, (err, data) => { + return err ? 
reject(err) : resolve(data) + }) + }) + } + + try { + const upgradeHandler = new UpgradeHandler(opts, callback) + this.dispatch({ + ...opts, + method: opts.method || 'GET', + upgrade: opts.protocol || 'Websocket' + }, upgradeHandler) + } catch (err) { + if (typeof callback !== 'function') { + throw err + } + const opaque = opts && opts.opaque + queueMicrotask(() => callback(err, { opaque })) + } +} + +module.exports = upgrade + + +/***/ }), + +/***/ 6615: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +module.exports.request = __nccwpck_require__(4043) +module.exports.stream = __nccwpck_require__(3560) +module.exports.pipeline = __nccwpck_require__(6862) +module.exports.upgrade = __nccwpck_require__(1882) +module.exports.connect = __nccwpck_require__(4660) + + +/***/ }), + +/***/ 9927: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// Ported from https://github.com/nodejs/undici/pull/907 + + + +const assert = __nccwpck_require__(2613) +const { Readable } = __nccwpck_require__(2203) +const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = __nccwpck_require__(8707) +const util = __nccwpck_require__(3440) +const { ReadableStreamFrom, toUSVString } = __nccwpck_require__(3440) + +let Blob + +const kConsume = Symbol('kConsume') +const kReading = Symbol('kReading') +const kBody = Symbol('kBody') +const kAbort = Symbol('abort') +const kContentType = Symbol('kContentType') + +const noop = () => {} + +module.exports = class BodyReadable extends Readable { + constructor ({ + resume, + abort, + contentType = '', + highWaterMark = 64 * 1024 // Same as nodejs fs streams. + }) { + super({ + autoDestroy: true, + read: resume, + highWaterMark + }) + + this._readableState.dataEmitted = false + + this[kAbort] = abort + this[kConsume] = null + this[kBody] = null + this[kContentType] = contentType + + // Is stream being consumed through Readable API? + // This is an optimization so that we avoid checking + // for 'data' and 'readable' listeners in the hot path + // inside push(). + this[kReading] = false + } + + destroy (err) { + if (this.destroyed) { + // Node < 16 + return this + } + + if (!err && !this._readableState.endEmitted) { + err = new RequestAbortedError() + } + + if (err) { + this[kAbort]() + } + + return super.destroy(err) + } + + emit (ev, ...args) { + if (ev === 'data') { + // Node < 16.7 + this._readableState.dataEmitted = true + } else if (ev === 'error') { + // Node < 16 + this._readableState.errorEmitted = true + } + return super.emit(ev, ...args) + } + + on (ev, ...args) { + if (ev === 'data' || ev === 'readable') { + this[kReading] = true + } + return super.on(ev, ...args) + } + + addListener (ev, ...args) { + return this.on(ev, ...args) + } + + off (ev, ...args) { + const ret = super.off(ev, ...args) + if (ev === 'data' || ev === 'readable') { + this[kReading] = ( + this.listenerCount('data') > 0 || + this.listenerCount('readable') > 0 + ) + } + return ret + } + + removeListener (ev, ...args) { + return this.off(ev, ...args) + } + + push (chunk) { + if (this[kConsume] && chunk !== null && this.readableLength === 0) { + consumePush(this[kConsume], chunk) + return this[kReading] ? 
super.push(chunk) : true + } + return super.push(chunk) + } + + // https://fetch.spec.whatwg.org/#dom-body-text + async text () { + return consume(this, 'text') + } + + // https://fetch.spec.whatwg.org/#dom-body-json + async json () { + return consume(this, 'json') + } + + // https://fetch.spec.whatwg.org/#dom-body-blob + async blob () { + return consume(this, 'blob') + } + + // https://fetch.spec.whatwg.org/#dom-body-arraybuffer + async arrayBuffer () { + return consume(this, 'arrayBuffer') + } + + // https://fetch.spec.whatwg.org/#dom-body-formdata + async formData () { + // TODO: Implement. + throw new NotSupportedError() + } + + // https://fetch.spec.whatwg.org/#dom-body-bodyused + get bodyUsed () { + return util.isDisturbed(this) + } + + // https://fetch.spec.whatwg.org/#dom-body-body + get body () { + if (!this[kBody]) { + this[kBody] = ReadableStreamFrom(this) + if (this[kConsume]) { + // TODO: Is this the best way to force a lock? + this[kBody].getReader() // Ensure stream is locked. + assert(this[kBody].locked) + } + } + return this[kBody] + } + + dump (opts) { + let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144 + const signal = opts && opts.signal + + if (signal) { + try { + if (typeof signal !== 'object' || !('aborted' in signal)) { + throw new InvalidArgumentError('signal must be an AbortSignal') + } + util.throwIfAborted(signal) + } catch (err) { + return Promise.reject(err) + } + } + + if (this.closed) { + return Promise.resolve(null) + } + + return new Promise((resolve, reject) => { + const signalListenerCleanup = signal + ? util.addAbortListener(signal, () => { + this.destroy() + }) + : noop + + this + .on('close', function () { + signalListenerCleanup() + if (signal && signal.aborted) { + reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) + } else { + resolve(null) + } + }) + .on('error', noop) + .on('data', function (chunk) { + limit -= chunk.length + if (limit <= 0) { + this.destroy() + } + }) + .resume() + }) + } +} + +// https://streams.spec.whatwg.org/#readablestream-locked +function isLocked (self) { + // Consume is an implicit lock. 
+ return (self[kBody] && self[kBody].locked === true) || self[kConsume] +} + +// https://fetch.spec.whatwg.org/#body-unusable +function isUnusable (self) { + return util.isDisturbed(self) || isLocked(self) +} + +async function consume (stream, type) { + if (isUnusable(stream)) { + throw new TypeError('unusable') + } + + assert(!stream[kConsume]) + + return new Promise((resolve, reject) => { + stream[kConsume] = { + type, + stream, + resolve, + reject, + length: 0, + body: [] + } + + stream + .on('error', function (err) { + consumeFinish(this[kConsume], err) + }) + .on('close', function () { + if (this[kConsume].body !== null) { + consumeFinish(this[kConsume], new RequestAbortedError()) + } + }) + + process.nextTick(consumeStart, stream[kConsume]) + }) +} + +function consumeStart (consume) { + if (consume.body === null) { + return + } + + const { _readableState: state } = consume.stream + + for (const chunk of state.buffer) { + consumePush(consume, chunk) + } + + if (state.endEmitted) { + consumeEnd(this[kConsume]) + } else { + consume.stream.on('end', function () { + consumeEnd(this[kConsume]) + }) + } + + consume.stream.resume() + + while (consume.stream.read() != null) { + // Loop + } +} + +function consumeEnd (consume) { + const { type, body, resolve, stream, length } = consume + + try { + if (type === 'text') { + resolve(toUSVString(Buffer.concat(body))) + } else if (type === 'json') { + resolve(JSON.parse(Buffer.concat(body))) + } else if (type === 'arrayBuffer') { + const dst = new Uint8Array(length) + + let pos = 0 + for (const buf of body) { + dst.set(buf, pos) + pos += buf.byteLength + } + + resolve(dst.buffer) + } else if (type === 'blob') { + if (!Blob) { + Blob = (__nccwpck_require__(181).Blob) + } + resolve(new Blob(body, { type: stream[kContentType] })) + } + + consumeFinish(consume) + } catch (err) { + stream.destroy(err) + } +} + +function consumePush (consume, chunk) { + consume.length += chunk.length + consume.body.push(chunk) +} + +function consumeFinish (consume, err) { + if (consume.body === null) { + return + } + + if (err) { + consume.reject(err) + } else { + consume.resolve() + } + + consume.type = null + consume.stream = null + consume.resolve = null + consume.reject = null + consume.length = 0 + consume.body = null +} + + +/***/ }), + +/***/ 7655: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const assert = __nccwpck_require__(2613) +const { + ResponseStatusCodeError +} = __nccwpck_require__(8707) +const { toUSVString } = __nccwpck_require__(3440) + +async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) { + assert(body) + + let chunks = [] + let limit = 0 + + for await (const chunk of body) { + chunks.push(chunk) + limit += chunk.length + if (limit > 128 * 1024) { + chunks = null + break + } + } + + if (statusCode === 204 || !contentType || !chunks) { + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) + return + } + + try { + if (contentType.startsWith('application/json')) { + const payload = JSON.parse(toUSVString(Buffer.concat(chunks))) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? 
`: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } + + if (contentType.startsWith('text/')) { + const payload = toUSVString(Buffer.concat(chunks)) + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload)) + return + } + } catch (err) { + // Process in a fallback if error + } + + process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers)) +} + +module.exports = { getResolveErrorBodyCallback } + + +/***/ }), + +/***/ 1093: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + BalancedPoolMissingUpstreamError, + InvalidArgumentError +} = __nccwpck_require__(8707) +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} = __nccwpck_require__(8640) +const Pool = __nccwpck_require__(5076) +const { kUrl, kInterceptors } = __nccwpck_require__(6443) +const { parseOrigin } = __nccwpck_require__(3440) +const kFactory = Symbol('factory') + +const kOptions = Symbol('options') +const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor') +const kCurrentWeight = Symbol('kCurrentWeight') +const kIndex = Symbol('kIndex') +const kWeight = Symbol('kWeight') +const kMaxWeightPerServer = Symbol('kMaxWeightPerServer') +const kErrorPenalty = Symbol('kErrorPenalty') + +function getGreatestCommonDivisor (a, b) { + if (b === 0) return a + return getGreatestCommonDivisor(b, a % b) +} + +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} + +class BalancedPool extends PoolBase { + constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) { + super() + + this[kOptions] = opts + this[kIndex] = -1 + this[kCurrentWeight] = 0 + + this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100 + this[kErrorPenalty] = this[kOptions].errorPenalty || 15 + + if (!Array.isArray(upstreams)) { + upstreams = [upstreams] + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool) + ? opts.interceptors.BalancedPool + : [] + this[kFactory] = factory + + for (const upstream of upstreams) { + this.addUpstream(upstream) + } + this._updateBalancedPoolStats() + } + + addUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin + + if (this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + ))) { + return this + } + const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions])) + + this[kAddClient](pool) + pool.on('connect', () => { + pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty]) + }) + + pool.on('connectionError', () => { + pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + }) + + pool.on('disconnect', (...args) => { + const err = args[2] + if (err && err.code === 'UND_ERR_SOCKET') { + // decrease the weight of the pool. 
+ pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) + this._updateBalancedPoolStats() + } + }) + + for (const client of this[kClients]) { + client[kWeight] = this[kMaxWeightPerServer] + } + + this._updateBalancedPoolStats() + + return this + } + + _updateBalancedPoolStats () { + this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0) + } + + removeUpstream (upstream) { + const upstreamOrigin = parseOrigin(upstream).origin + + const pool = this[kClients].find((pool) => ( + pool[kUrl].origin === upstreamOrigin && + pool.closed !== true && + pool.destroyed !== true + )) + + if (pool) { + this[kRemoveClient](pool) + } + + return this + } + + get upstreams () { + return this[kClients] + .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true) + .map((p) => p[kUrl].origin) + } + + [kGetDispatcher] () { + // We validate that pools is greater than 0, + // otherwise we would have to wait until an upstream + // is added, which might never happen. + if (this[kClients].length === 0) { + throw new BalancedPoolMissingUpstreamError() + } + + const dispatcher = this[kClients].find(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) + + if (!dispatcher) { + return + } + + const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true) + + if (allClientsBusy) { + return + } + + let counter = 0 + + let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain]) + + while (counter++ < this[kClients].length) { + this[kIndex] = (this[kIndex] + 1) % this[kClients].length + const pool = this[kClients][this[kIndex]] + + // find pool index with the largest weight + if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) { + maxWeightIndex = this[kIndex] + } + + // decrease the current weight every `this[kClients].length`. + if (this[kIndex] === 0) { + // Set the current weight to the next lower weight. 
+ this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor] + + if (this[kCurrentWeight] <= 0) { + this[kCurrentWeight] = this[kMaxWeightPerServer] + } + } + if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) { + return pool + } + } + + this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight] + this[kIndex] = maxWeightIndex + return this[kClients][maxWeightIndex] + } +} + +module.exports = BalancedPool + + +/***/ }), + +/***/ 479: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { kConstruct } = __nccwpck_require__(296) +const { urlEquals, fieldValues: getFieldValues } = __nccwpck_require__(3993) +const { kEnumerableProperty, isDisturbed } = __nccwpck_require__(3440) +const { kHeadersList } = __nccwpck_require__(6443) +const { webidl } = __nccwpck_require__(4222) +const { Response, cloneResponse } = __nccwpck_require__(8676) +const { Request } = __nccwpck_require__(5194) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(9710) +const { fetching } = __nccwpck_require__(2315) +const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = __nccwpck_require__(5523) +const assert = __nccwpck_require__(2613) +const { getGlobalDispatcher } = __nccwpck_require__(2581) + +/** + * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation + * @typedef {Object} CacheBatchOperation + * @property {'delete' | 'put'} type + * @property {any} request + * @property {any} response + * @property {import('../../types/cache').CacheQueryOptions} options + */ + +/** + * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list + * @typedef {[any, any][]} requestResponseList + */ + +class Cache { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list + * @type {requestResponseList} + */ + #relevantRequestResponseList + + constructor () { + if (arguments[0] !== kConstruct) { + webidl.illegalConstructor() + } + + this.#relevantRequestResponseList = arguments[1] + } + + async match (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' }) + + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + const p = await this.matchAll(request, options) + + if (p.length === 0) { + return + } + + return p[0] + } + + async matchAll (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) + + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + // 1. + let r = null + + // 2. + if (request !== undefined) { + if (request instanceof Request) { + // 2.1.1 + r = request[kState] + + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { + // 2.2.1 + r = new Request(request)[kState] + } + } + + // 5. + // 5.1 + const responses = [] + + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + responses.push(requestResponse[1]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) + + // 5.3.2 + for (const requestResponse of requestResponses) { + responses.push(requestResponse[1]) + } + } + + // 5.4 + // We don't implement CORs so we don't need to loop over the responses, yay! 
+ + // 5.5.1 + const responseList = [] + + // 5.5.2 + for (const response of responses) { + // 5.5.2.1 + const responseObject = new Response(response.body?.source ?? null) + const body = responseObject[kState].body + responseObject[kState] = response + responseObject[kState].body = body + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + + responseList.push(responseObject) + } + + // 6. + return Object.freeze(responseList) + } + + async add (request) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' }) + + request = webidl.converters.RequestInfo(request) + + // 1. + const requests = [request] + + // 2. + const responseArrayPromise = this.addAll(requests) + + // 3. + return await responseArrayPromise + } + + async addAll (requests) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) + + requests = webidl.converters['sequence'](requests) + + // 1. + const responsePromises = [] + + // 2. + const requestList = [] + + // 3. + for (const request of requests) { + if (typeof request === 'string') { + continue + } + + // 3.1 + const r = request[kState] + + // 3.2 + if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme when method is not GET.' + }) + } + } + + // 4. + /** @type {ReturnType[]} */ + const fetchControllers = [] + + // 5. + for (const request of requests) { + // 5.1 + const r = new Request(request)[kState] + + // 5.2 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Expected http/s scheme.' + }) + } + + // 5.4 + r.initiator = 'fetch' + r.destination = 'subresource' + + // 5.5 + requestList.push(r) + + // 5.6 + const responsePromise = createDeferredPromise() + + // 5.7 + fetchControllers.push(fetching({ + request: r, + dispatcher: getGlobalDispatcher(), + processResponse (response) { + // 1. + if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'Received an invalid status code or the request failed.' + })) + } else if (response.headersList.contains('vary')) { // 2. + // 2.1 + const fieldValues = getFieldValues(response.headersList.get('vary')) + + // 2.2 + for (const fieldValue of fieldValues) { + // 2.2.1 + if (fieldValue === '*') { + responsePromise.reject(webidl.errors.exception({ + header: 'Cache.addAll', + message: 'invalid vary field value' + })) + + for (const controller of fetchControllers) { + controller.abort() + } + + return + } + } + } + }, + processResponseEndOfBody (response) { + // 1. + if (response.aborted) { + responsePromise.reject(new DOMException('aborted', 'AbortError')) + return + } + + // 2. + responsePromise.resolve(response) + } + })) + + // 5.8 + responsePromises.push(responsePromise.promise) + } + + // 6. + const p = Promise.all(responsePromises) + + // 7. 
+ const responses = await p + + // 7.1 + const operations = [] + + // 7.2 + let index = 0 + + // 7.3 + for (const response of responses) { + // 7.3.1 + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 7.3.2 + request: requestList[index], // 7.3.3 + response // 7.3.4 + } + + operations.push(operation) // 7.3.5 + + index++ // 7.3.6 + } + + // 7.5 + const cacheJobPromise = createDeferredPromise() + + // 7.6.1 + let errorData = null + + // 7.6.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } + + // 7.6.3 + queueMicrotask(() => { + // 7.6.3.1 + if (errorData === null) { + cacheJobPromise.resolve(undefined) + } else { + // 7.6.3.2 + cacheJobPromise.reject(errorData) + } + }) + + // 7.7 + return cacheJobPromise.promise + } + + async put (request, response) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' }) + + request = webidl.converters.RequestInfo(request) + response = webidl.converters.Response(response) + + // 1. + let innerRequest = null + + // 2. + if (request instanceof Request) { + innerRequest = request[kState] + } else { // 3. + innerRequest = new Request(request)[kState] + } + + // 4. + if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Expected an http/s scheme when method is not GET' + }) + } + + // 5. + const innerResponse = response[kState] + + // 6. + if (innerResponse.status === 206) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got 206 status' + }) + } + + // 7. + if (innerResponse.headersList.contains('vary')) { + // 7.1. + const fieldValues = getFieldValues(innerResponse.headersList.get('vary')) + + // 7.2. + for (const fieldValue of fieldValues) { + // 7.2.1 + if (fieldValue === '*') { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Got * vary field value' + }) + } + } + } + + // 8. + if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) { + throw webidl.errors.exception({ + header: 'Cache.put', + message: 'Response body is locked or disturbed' + }) + } + + // 9. + const clonedResponse = cloneResponse(innerResponse) + + // 10. + const bodyReadPromise = createDeferredPromise() + + // 11. + if (innerResponse.body != null) { + // 11.1 + const stream = innerResponse.body.stream + + // 11.2 + const reader = stream.getReader() + + // 11.3 + readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject) + } else { + bodyReadPromise.resolve(undefined) + } + + // 12. + /** @type {CacheBatchOperation[]} */ + const operations = [] + + // 13. + /** @type {CacheBatchOperation} */ + const operation = { + type: 'put', // 14. + request: innerRequest, // 15. + response: clonedResponse // 16. + } + + // 17. + operations.push(operation) + + // 19. 
+ const bytes = await bodyReadPromise.promise + + if (clonedResponse.body != null) { + clonedResponse.body.source = bytes + } + + // 19.1 + const cacheJobPromise = createDeferredPromise() + + // 19.2.1 + let errorData = null + + // 19.2.2 + try { + this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } + + // 19.2.3 + queueMicrotask(() => { + // 19.2.3.1 + if (errorData === null) { + cacheJobPromise.resolve() + } else { // 19.2.3.2 + cacheJobPromise.reject(errorData) + } + }) + + return cacheJobPromise.promise + } + + async delete (request, options = {}) { + webidl.brandCheck(this, Cache) + webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' }) + + request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + /** + * @type {Request} + */ + let r = null + + if (request instanceof Request) { + r = request[kState] + + if (r.method !== 'GET' && !options.ignoreMethod) { + return false + } + } else { + assert(typeof request === 'string') + + r = new Request(request)[kState] + } + + /** @type {CacheBatchOperation[]} */ + const operations = [] + + /** @type {CacheBatchOperation} */ + const operation = { + type: 'delete', + request: r, + options + } + + operations.push(operation) + + const cacheJobPromise = createDeferredPromise() + + let errorData = null + let requestResponses + + try { + requestResponses = this.#batchCacheOperations(operations) + } catch (e) { + errorData = e + } + + queueMicrotask(() => { + if (errorData === null) { + cacheJobPromise.resolve(!!requestResponses?.length) + } else { + cacheJobPromise.reject(errorData) + } + }) + + return cacheJobPromise.promise + } + + /** + * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys + * @param {any} request + * @param {import('../../types/cache').CacheQueryOptions} options + * @returns {readonly Request[]} + */ + async keys (request = undefined, options = {}) { + webidl.brandCheck(this, Cache) + + if (request !== undefined) request = webidl.converters.RequestInfo(request) + options = webidl.converters.CacheQueryOptions(options) + + // 1. + let r = null + + // 2. + if (request !== undefined) { + // 2.1 + if (request instanceof Request) { + // 2.1.1 + r = request[kState] + + // 2.1.2 + if (r.method !== 'GET' && !options.ignoreMethod) { + return [] + } + } else if (typeof request === 'string') { // 2.2 + r = new Request(request)[kState] + } + } + + // 4. + const promise = createDeferredPromise() + + // 5. 
+ // 5.1 + const requests = [] + + // 5.2 + if (request === undefined) { + // 5.2.1 + for (const requestResponse of this.#relevantRequestResponseList) { + // 5.2.1.1 + requests.push(requestResponse[0]) + } + } else { // 5.3 + // 5.3.1 + const requestResponses = this.#queryCache(r, options) + + // 5.3.2 + for (const requestResponse of requestResponses) { + // 5.3.2.1 + requests.push(requestResponse[0]) + } + } + + // 5.4 + queueMicrotask(() => { + // 5.4.1 + const requestList = [] + + // 5.4.2 + for (const request of requests) { + const requestObject = new Request('https://a') + requestObject[kState] = request + requestObject[kHeaders][kHeadersList] = request.headersList + requestObject[kHeaders][kGuard] = 'immutable' + requestObject[kRealm] = request.client + + // 5.4.2.1 + requestList.push(requestObject) + } + + // 5.4.3 + promise.resolve(Object.freeze(requestList)) + }) + + return promise.promise + } + + /** + * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm + * @param {CacheBatchOperation[]} operations + * @returns {requestResponseList} + */ + #batchCacheOperations (operations) { + // 1. + const cache = this.#relevantRequestResponseList + + // 2. + const backupCache = [...cache] + + // 3. + const addedItems = [] + + // 4.1 + const resultList = [] + + try { + // 4.2 + for (const operation of operations) { + // 4.2.1 + if (operation.type !== 'delete' && operation.type !== 'put') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'operation type does not match "delete" or "put"' + }) + } + + // 4.2.2 + if (operation.type === 'delete' && operation.response != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'delete operation should not have an associated response' + }) + } + + // 4.2.3 + if (this.#queryCache(operation.request, operation.options, addedItems).length) { + throw new DOMException('???', 'InvalidStateError') + } + + // 4.2.4 + let requestResponses + + // 4.2.5 + if (operation.type === 'delete') { + // 4.2.5.1 + requestResponses = this.#queryCache(operation.request, operation.options) + + // TODO: the spec is wrong, this is needed to pass WPTs + if (requestResponses.length === 0) { + return [] + } + + // 4.2.5.2 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) + + // 4.2.5.2.1 + cache.splice(idx, 1) + } + } else if (operation.type === 'put') { // 4.2.6 + // 4.2.6.1 + if (operation.response == null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'put operation should have an associated response' + }) + } + + // 4.2.6.2 + const r = operation.request + + // 4.2.6.3 + if (!urlIsHttpHttpsScheme(r.url)) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'expected http or https scheme' + }) + } + + // 4.2.6.4 + if (r.method !== 'GET') { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'not get method' + }) + } + + // 4.2.6.5 + if (operation.options != null) { + throw webidl.errors.exception({ + header: 'Cache.#batchCacheOperations', + message: 'options must not be defined' + }) + } + + // 4.2.6.6 + requestResponses = this.#queryCache(operation.request) + + // 4.2.6.7 + for (const requestResponse of requestResponses) { + const idx = cache.indexOf(requestResponse) + assert(idx !== -1) + + // 4.2.6.7.1 + cache.splice(idx, 1) + } + + // 4.2.6.8 + cache.push([operation.request, operation.response]) + + 
// 4.2.6.10 + addedItems.push([operation.request, operation.response]) + } + + // 4.2.7 + resultList.push([operation.request, operation.response]) + } + + // 4.3 + return resultList + } catch (e) { // 5. + // 5.1 + this.#relevantRequestResponseList.length = 0 + + // 5.2 + this.#relevantRequestResponseList = backupCache + + // 5.3 + throw e + } + } + + /** + * @see https://w3c.github.io/ServiceWorker/#query-cache + * @param {any} requestQuery + * @param {import('../../types/cache').CacheQueryOptions} options + * @param {requestResponseList} targetStorage + * @returns {requestResponseList} + */ + #queryCache (requestQuery, options, targetStorage) { + /** @type {requestResponseList} */ + const resultList = [] + + const storage = targetStorage ?? this.#relevantRequestResponseList + + for (const requestResponse of storage) { + const [cachedRequest, cachedResponse] = requestResponse + if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) { + resultList.push(requestResponse) + } + } + + return resultList + } + + /** + * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm + * @param {any} requestQuery + * @param {any} request + * @param {any | null} response + * @param {import('../../types/cache').CacheQueryOptions | undefined} options + * @returns {boolean} + */ + #requestMatchesCachedItem (requestQuery, request, response = null, options) { + // if (options?.ignoreMethod === false && request.method === 'GET') { + // return false + // } + + const queryURL = new URL(requestQuery.url) + + const cachedURL = new URL(request.url) + + if (options?.ignoreSearch) { + cachedURL.search = '' + + queryURL.search = '' + } + + if (!urlEquals(queryURL, cachedURL, true)) { + return false + } + + if ( + response == null || + options?.ignoreVary || + !response.headersList.contains('vary') + ) { + return true + } + + const fieldValues = getFieldValues(response.headersList.get('vary')) + + for (const fieldValue of fieldValues) { + if (fieldValue === '*') { + return false + } + + const requestValue = request.headersList.get(fieldValue) + const queryValue = requestQuery.headersList.get(fieldValue) + + // If one has the header and the other doesn't, or one has + // a different value than the other, return false + if (requestValue !== queryValue) { + return false + } + } + + return true + } +} + +Object.defineProperties(Cache.prototype, { + [Symbol.toStringTag]: { + value: 'Cache', + configurable: true + }, + match: kEnumerableProperty, + matchAll: kEnumerableProperty, + add: kEnumerableProperty, + addAll: kEnumerableProperty, + put: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) + +const cacheQueryOptionConverters = [ + { + key: 'ignoreSearch', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreMethod', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'ignoreVary', + converter: webidl.converters.boolean, + defaultValue: false + } +] + +webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters) + +webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([ + ...cacheQueryOptionConverters, + { + key: 'cacheName', + converter: webidl.converters.DOMString + } +]) + +webidl.converters.Response = webidl.interfaceConverter(Response) + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.RequestInfo +) + +module.exports = { + Cache +} + + +/***/ }), + +/***/ 4738: +/***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { kConstruct } = __nccwpck_require__(296) +const { Cache } = __nccwpck_require__(479) +const { webidl } = __nccwpck_require__(4222) +const { kEnumerableProperty } = __nccwpck_require__(3440) + +class CacheStorage { + /** + * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map + * @type {Map} + */ + async has (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' }) + + cacheName = webidl.converters.DOMString(cacheName) + + // 2.1.1 + // 2.2 + return this.#caches.has(cacheName) + } + + /** + * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open + * @param {string} cacheName + * @returns {Promise} + */ + async open (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' }) + + cacheName = webidl.converters.DOMString(cacheName) + + // 2.1 + if (this.#caches.has(cacheName)) { + // await caches.open('v1') !== await caches.open('v1') + + // 2.1.1 + const cache = this.#caches.get(cacheName) + + // 2.1.1.1 + return new Cache(kConstruct, cache) + } + + // 2.2 + const cache = [] + + // 2.3 + this.#caches.set(cacheName, cache) + + // 2.4 + return new Cache(kConstruct, cache) + } + + /** + * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete + * @param {string} cacheName + * @returns {Promise} + */ + async delete (cacheName) { + webidl.brandCheck(this, CacheStorage) + webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' }) + + cacheName = webidl.converters.DOMString(cacheName) + + return this.#caches.delete(cacheName) + } + + /** + * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys + * @returns {string[]} + */ + async keys () { + webidl.brandCheck(this, CacheStorage) + + // 2.1 + const keys = this.#caches.keys() + + // 2.2 + return [...keys] + } +} + +Object.defineProperties(CacheStorage.prototype, { + [Symbol.toStringTag]: { + value: 'CacheStorage', + configurable: true + }, + match: kEnumerableProperty, + has: kEnumerableProperty, + open: kEnumerableProperty, + delete: kEnumerableProperty, + keys: kEnumerableProperty +}) + +module.exports = { + CacheStorage +} + + +/***/ }), + +/***/ 296: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +module.exports = { + kConstruct: (__nccwpck_require__(6443).kConstruct) +} + + +/***/ }), + +/***/ 3993: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const assert = __nccwpck_require__(2613) +const { URLSerializer } = __nccwpck_require__(4322) +const { isValidHeaderName } = __nccwpck_require__(5523) + +/** + * @see https://url.spec.whatwg.org/#concept-url-equals + * @param {URL} A + * @param {URL} B + * @param {boolean | undefined} excludeFragment + * @returns {boolean} + */ +function urlEquals (A, B, excludeFragment = false) { + const serializedA = URLSerializer(A, excludeFragment) + + const serializedB = URLSerializer(B, excludeFragment) + + return serializedA === serializedB +} + +/** + * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262 + * @param {string} header + */ +function fieldValues (header) { + assert(header !== null) + + const values = [] + + for (let value of header.split(',')) { + value = value.trim() + + if (!value.length) { + continue + } else if 
(!isValidHeaderName(value)) { + continue + } + + values.push(value) + } + + return values +} + +module.exports = { + urlEquals, + fieldValues +} + + +/***/ }), + +/***/ 6197: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// @ts-check + + + +/* global WebAssembly */ + +const assert = __nccwpck_require__(2613) +const net = __nccwpck_require__(9278) +const http = __nccwpck_require__(8611) +const { pipeline } = __nccwpck_require__(2203) +const util = __nccwpck_require__(3440) +const timers = __nccwpck_require__(8804) +const Request = __nccwpck_require__(4655) +const DispatcherBase = __nccwpck_require__(1) +const { + RequestContentLengthMismatchError, + ResponseContentLengthMismatchError, + InvalidArgumentError, + RequestAbortedError, + HeadersTimeoutError, + HeadersOverflowError, + SocketError, + InformationalError, + BodyTimeoutError, + HTTPParserError, + ResponseExceededMaxSizeError, + ClientDestroyedError +} = __nccwpck_require__(8707) +const buildConnector = __nccwpck_require__(9136) +const { + kUrl, + kReset, + kServerName, + kClient, + kBusy, + kParser, + kConnect, + kBlocking, + kResuming, + kRunning, + kPending, + kSize, + kWriting, + kQueue, + kConnected, + kConnecting, + kNeedDrain, + kNoRef, + kKeepAliveDefaultTimeout, + kHostHeader, + kPendingIdx, + kRunningIdx, + kError, + kPipelining, + kSocket, + kKeepAliveTimeoutValue, + kMaxHeadersSize, + kKeepAliveMaxTimeout, + kKeepAliveTimeoutThreshold, + kHeadersTimeout, + kBodyTimeout, + kStrictContentLength, + kConnector, + kMaxRedirections, + kMaxRequests, + kCounter, + kClose, + kDestroy, + kDispatch, + kInterceptors, + kLocalAddress, + kMaxResponseSize, + kHTTPConnVersion, + // HTTP2 + kHost, + kHTTP2Session, + kHTTP2SessionState, + kHTTP2BuildRequest, + kHTTP2CopyHeaders, + kHTTP1BuildRequest +} = __nccwpck_require__(6443) + +/** @type {import('http2')} */ +let http2 +try { + http2 = __nccwpck_require__(5675) +} catch { + // @ts-ignore + http2 = { constants: {} } +} + +const { + constants: { + HTTP2_HEADER_AUTHORITY, + HTTP2_HEADER_METHOD, + HTTP2_HEADER_PATH, + HTTP2_HEADER_SCHEME, + HTTP2_HEADER_CONTENT_LENGTH, + HTTP2_HEADER_EXPECT, + HTTP2_HEADER_STATUS + } +} = http2 + +// Experimental +let h2ExperimentalWarned = false + +const FastBuffer = Buffer[Symbol.species] + +const kClosedResolve = Symbol('kClosedResolve') + +const channels = {} + +try { + const diagnosticsChannel = __nccwpck_require__(1637) + channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders') + channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect') + channels.connectError = diagnosticsChannel.channel('undici:client:connectError') + channels.connected = diagnosticsChannel.channel('undici:client:connected') +} catch { + channels.sendHeaders = { hasSubscribers: false } + channels.beforeConnect = { hasSubscribers: false } + channels.connectError = { hasSubscribers: false } + channels.connected = { hasSubscribers: false } +} + +/** + * @type {import('../types/client').default} + */ +class Client extends DispatcherBase { + /** + * + * @param {string|URL} url + * @param {import('../types/client').Client.Options} options + */ + constructor (url, { + interceptors, + maxHeaderSize, + headersTimeout, + socketTimeout, + requestTimeout, + connectTimeout, + bodyTimeout, + idleTimeout, + keepAlive, + keepAliveTimeout, + maxKeepAliveTimeout, + keepAliveMaxTimeout, + keepAliveTimeoutThreshold, + socketPath, + pipelining, + tls, + strictContentLength, + maxCachedSessions, + maxRedirections, + 
connect, + maxRequestsPerClient, + localAddress, + maxResponseSize, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + // h2 + allowH2, + maxConcurrentStreams + } = {}) { + super() + + if (keepAlive !== undefined) { + throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead') + } + + if (socketTimeout !== undefined) { + throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead') + } + + if (requestTimeout !== undefined) { + throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead') + } + + if (idleTimeout !== undefined) { + throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead') + } + + if (maxKeepAliveTimeout !== undefined) { + throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead') + } + + if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) { + throw new InvalidArgumentError('invalid maxHeaderSize') + } + + if (socketPath != null && typeof socketPath !== 'string') { + throw new InvalidArgumentError('invalid socketPath') + } + + if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) { + throw new InvalidArgumentError('invalid connectTimeout') + } + + if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveTimeout') + } + + if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) { + throw new InvalidArgumentError('invalid keepAliveMaxTimeout') + } + + if (keepAliveTimeoutThreshold != null && !Number.isFinite(keepAliveTimeoutThreshold)) { + throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold') + } + + if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('headersTimeout must be a positive integer or zero') + } + + if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } + + if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) { + throw new InvalidArgumentError('maxRequestsPerClient must be a positive number') + } + + if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) { + throw new InvalidArgumentError('localAddress must be valid string IP address') + } + + if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) { + throw new InvalidArgumentError('maxResponseSize must be a positive number') + } + + if ( + autoSelectFamilyAttemptTimeout != null && + (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1) + ) { + throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number') + } + + // h2 + if (allowH2 != null && typeof allowH2 !== 'boolean') { + throw new InvalidArgumentError('allowH2 must be a valid boolean value') + } + + if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 
'number' || maxConcurrentStreams < 1)) { + throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0') + } + + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) + } + + this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client) + ? interceptors.Client + : [createRedirectInterceptor({ maxRedirections })] + this[kUrl] = util.parseOrigin(url) + this[kConnector] = connect + this[kSocket] = null + this[kPipelining] = pipelining != null ? pipelining : 1 + this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize + this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout + this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout + this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold + this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout] + this[kServerName] = null + this[kLocalAddress] = localAddress != null ? localAddress : null + this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming + this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n` + this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3 + this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3 + this[kStrictContentLength] = strictContentLength == null ? true : strictContentLength + this[kMaxRedirections] = maxRedirections + this[kMaxRequests] = maxRequestsPerClient + this[kClosedResolve] = null + this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1 + this[kHTTPConnVersion] = 'h1' + + // HTTP/2 + this[kHTTP2Session] = null + this[kHTTP2SessionState] = !allowH2 + ? null + : { + // streams: null, // Fixed queue of streams - For future support of `push` + openStreams: 0, // Keep track of them to decide wether or not unref the session + maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server + } + this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}` + + // kQueue is built up of 3 sections separated by + // the kRunningIdx and kPendingIdx indices. + // | complete | running | pending | + // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length + // kRunningIdx points to the first running element. + // kPendingIdx points to the first pending element. + // This implements a fast queue with an amortized + // time of O(1). 
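+    // NOTE (editor's illustration, not part of the bundled undici source):
+    // with kQueue = [A, B, C, D], kRunningIdx = 1 and kPendingIdx = 3,
+    // A is completed, B and C are running, and D is pending. The completed
+    // prefix is only spliced off once kRunningIdx grows large (see resume()),
+    // which is what keeps enqueue/dequeue amortized O(1).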
+ + this[kQueue] = [] + this[kRunningIdx] = 0 + this[kPendingIdx] = 0 + } + + get pipelining () { + return this[kPipelining] + } + + set pipelining (value) { + this[kPipelining] = value + resume(this, true) + } + + get [kPending] () { + return this[kQueue].length - this[kPendingIdx] + } + + get [kRunning] () { + return this[kPendingIdx] - this[kRunningIdx] + } + + get [kSize] () { + return this[kQueue].length - this[kRunningIdx] + } + + get [kConnected] () { + return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed + } + + get [kBusy] () { + const socket = this[kSocket] + return ( + (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) || + (this[kSize] >= (this[kPipelining] || 1)) || + this[kPending] > 0 + ) + } + + /* istanbul ignore: only used for test */ + [kConnect] (cb) { + connect(this) + this.once('connect', cb) + } + + [kDispatch] (opts, handler) { + const origin = opts.origin || this[kUrl].origin + + const request = this[kHTTPConnVersion] === 'h2' + ? Request[kHTTP2BuildRequest](origin, opts, handler) + : Request[kHTTP1BuildRequest](origin, opts, handler) + + this[kQueue].push(request) + if (this[kResuming]) { + // Do nothing. + } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) { + // Wait a tick in case stream/iterator is ended in the same tick. + this[kResuming] = 1 + process.nextTick(resume, this) + } else { + resume(this, true) + } + + if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) { + this[kNeedDrain] = 2 + } + + return this[kNeedDrain] < 2 + } + + async [kClose] () { + // TODO: for H2 we need to gracefully flush the remaining enqueued + // request and close each stream. + return new Promise((resolve) => { + if (!this[kSize]) { + resolve(null) + } else { + this[kClosedResolve] = resolve + } + }) + } + + async [kDestroy] (err) { + return new Promise((resolve) => { + const requests = this[kQueue].splice(this[kPendingIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) + } + + const callback = () => { + if (this[kClosedResolve]) { + // TODO (fix): Should we error here with ClientDestroyedError? + this[kClosedResolve]() + this[kClosedResolve] = null + } + resolve() + } + + if (this[kHTTP2Session] != null) { + util.destroy(this[kHTTP2Session], err) + this[kHTTP2Session] = null + this[kHTTP2SessionState] = null + } + + if (!this[kSocket]) { + queueMicrotask(callback) + } else { + util.destroy(this[kSocket].on('close', callback), err) + } + + resume(this) + }) + } +} + +function onHttp2SessionError (err) { + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') + + this[kSocket][kError] = err + + onError(this[kClient], err) +} + +function onHttp2FrameError (type, code, id) { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + + if (id === 0) { + this[kSocket][kError] = err + onError(this[kClient], err) + } +} + +function onHttp2SessionEnd () { + util.destroy(this, new SocketError('other side closed')) + util.destroy(this[kSocket], new SocketError('other side closed')) +} + +function onHTTP2GoAway (code) { + const client = this[kClient] + const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`) + client[kSocket] = null + client[kHTTP2Session] = null + + if (client.destroyed) { + assert(this[kPending] === 0) + + // Fail entire queue. 
+ const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(this, request, err) + } + } else if (client[kRunning] > 0) { + // Fail head of pipeline. + const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null + + errorRequest(client, request, err) + } + + client[kPendingIdx] = client[kRunningIdx] + + assert(client[kRunning] === 0) + + client.emit('disconnect', + client[kUrl], + [client], + err + ) + + resume(client) +} + +const constants = __nccwpck_require__(2824) +const createRedirectInterceptor = __nccwpck_require__(4415) +const EMPTY_BUF = Buffer.alloc(0) + +async function lazyllhttp () { + const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(3870) : undefined + + let mod + try { + mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(3434), 'base64')) + } catch (e) { + /* istanbul ignore next */ + + // We could check if the error was caused by the simd option not + // being enabled, but the occurring of this other error + // * https://github.com/emscripten-core/emscripten/issues/11495 + // got me to remove that check to avoid breaking Node 12. + mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(3870), 'base64')) + } + + return await WebAssembly.instantiate(mod, { + env: { + /* eslint-disable camelcase */ + + wasm_on_url: (p, at, len) => { + /* istanbul ignore next */ + return 0 + }, + wasm_on_status: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_begin: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageBegin() || 0 + }, + wasm_on_header_field: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_header_value: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0 + }, + wasm_on_body: (p, at, len) => { + assert.strictEqual(currentParser.ptr, p) + const start = at - currentBufferPtr + currentBufferRef.byteOffset + return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 + }, + wasm_on_message_complete: (p) => { + assert.strictEqual(currentParser.ptr, p) + return currentParser.onMessageComplete() || 0 + } + + /* eslint-enable camelcase */ + } + }) +} + +let llhttpInstance = null +let llhttpPromise = lazyllhttp() +llhttpPromise.catch() + +let currentParser = null +let currentBufferRef = null +let currentBufferSize = 0 +let currentBufferPtr = null + +const TIMEOUT_HEADERS = 1 +const TIMEOUT_BODY = 2 +const TIMEOUT_IDLE = 3 + +class Parser { + constructor (client, socket, { exports }) { + assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0) + + this.llhttp = exports + this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE) + this.client = client 
+ this.socket = socket + this.timeout = null + this.timeoutValue = null + this.timeoutType = null + this.statusCode = null + this.statusText = '' + this.upgrade = false + this.headers = [] + this.headersSize = 0 + this.headersMaxSize = client[kMaxHeadersSize] + this.shouldKeepAlive = false + this.paused = false + this.resume = this.resume.bind(this) + + this.bytesRead = 0 + + this.keepAlive = '' + this.contentLength = '' + this.connection = '' + this.maxResponseSize = client[kMaxResponseSize] + } + + setTimeout (value, type) { + this.timeoutType = type + if (value !== this.timeoutValue) { + timers.clearTimeout(this.timeout) + if (value) { + this.timeout = timers.setTimeout(onParserTimeout, value, this) + // istanbul ignore else: only for jest + if (this.timeout.unref) { + this.timeout.unref() + } + } else { + this.timeout = null + } + this.timeoutValue = value + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + } + + resume () { + if (this.socket.destroyed || !this.paused) { + return + } + + assert(this.ptr != null) + assert(currentParser == null) + + this.llhttp.llhttp_resume(this.ptr) + + assert(this.timeoutType === TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + + this.paused = false + this.execute(this.socket.read() || EMPTY_BUF) // Flush parser. + this.readMore() + } + + readMore () { + while (!this.paused && this.ptr) { + const chunk = this.socket.read() + if (chunk === null) { + break + } + this.execute(chunk) + } + } + + execute (data) { + assert(this.ptr != null) + assert(currentParser == null) + assert(!this.paused) + + const { socket, llhttp } = this + + if (data.length > currentBufferSize) { + if (currentBufferPtr) { + llhttp.free(currentBufferPtr) + } + currentBufferSize = Math.ceil(data.length / 4096) * 4096 + currentBufferPtr = llhttp.malloc(currentBufferSize) + } + + new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data) + + // Call `execute` on the wasm parser. + // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data, + // and finally the length of bytes to parse. + // The return value is an error code or `constants.ERROR.OK`. 
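+    // Editor's summary of the handling below (illustrative, not part of the
+    // bundled undici source): ret = llhttp.llhttp_execute(this.ptr,
+    // currentBufferPtr, data.length) is compared against
+    // constants.ERROR.PAUSED_UPGRADE, constants.ERROR.PAUSED and
+    // constants.ERROR.OK; any other value is turned into an HTTPParserError
+    // using llhttp_get_error_reason / llhttp_get_error_pos.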
+ try { + let ret + + try { + currentBufferRef = data + currentParser = this + ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length) + /* eslint-disable-next-line no-useless-catch */ + } catch (err) { + /* istanbul ignore next: difficult to make a test case for */ + throw err + } finally { + currentParser = null + currentBufferRef = null + } + + const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr + + if (ret === constants.ERROR.PAUSED_UPGRADE) { + this.onUpgrade(data.slice(offset)) + } else if (ret === constants.ERROR.PAUSED) { + this.paused = true + socket.unshift(data.slice(offset)) + } else if (ret !== constants.ERROR.OK) { + const ptr = llhttp.llhttp_get_error_reason(this.ptr) + let message = '' + /* istanbul ignore else: difficult to make a test case for */ + if (ptr) { + const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0) + message = + 'Response does not match the HTTP/1.1 protocol (' + + Buffer.from(llhttp.memory.buffer, ptr, len).toString() + + ')' + } + throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset)) + } + } catch (err) { + util.destroy(socket, err) + } + } + + destroy () { + assert(this.ptr != null) + assert(currentParser == null) + + this.llhttp.llhttp_free(this.ptr) + this.ptr = null + + timers.clearTimeout(this.timeout) + this.timeout = null + this.timeoutValue = null + this.timeoutType = null + + this.paused = false + } + + onStatus (buf) { + this.statusText = buf.toString() + } + + onMessageBegin () { + const { socket, client } = this + + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 + } + + const request = client[kQueue][client[kRunningIdx]] + if (!request) { + return -1 + } + } + + onHeaderField (buf) { + const len = this.headers.length + + if ((len & 1) === 0) { + this.headers.push(buf) + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } + + this.trackHeader(buf.length) + } + + onHeaderValue (buf) { + let len = this.headers.length + + if ((len & 1) === 1) { + this.headers.push(buf) + len += 1 + } else { + this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) + } + + const key = this.headers[len - 2] + if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') { + this.keepAlive += buf.toString() + } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') { + this.connection += buf.toString() + } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') { + this.contentLength += buf.toString() + } + + this.trackHeader(buf.length) + } + + trackHeader (len) { + this.headersSize += len + if (this.headersSize >= this.headersMaxSize) { + util.destroy(this.socket, new HeadersOverflowError()) + } + } + + onUpgrade (head) { + const { upgrade, client, socket, headers, statusCode } = this + + assert(upgrade) + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert(!socket.destroyed) + assert(socket === client[kSocket]) + assert(!this.paused) + assert(request.upgrade || request.method === 'CONNECT') + + this.statusCode = null + this.statusText = '' + this.shouldKeepAlive = null + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + socket.unshift(head) + + socket[kParser].destroy() + socket[kParser] = null + + socket[kClient] = null + socket[kError] = null + socket + .removeListener('error', onSocketError) + .removeListener('readable', onSocketReadable) + .removeListener('end', onSocketEnd) + 
.removeListener('close', onSocketClose) + + client[kSocket] = null + client[kQueue][client[kRunningIdx]++] = null + client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade')) + + try { + request.onUpgrade(statusCode, headers, socket) + } catch (err) { + util.destroy(socket, err) + } + + resume(client) + } + + onHeadersComplete (statusCode, upgrade, shouldKeepAlive) { + const { client, socket, headers, statusText } = this + + /* istanbul ignore next: difficult to make a test case for */ + if (socket.destroyed) { + return -1 + } + + const request = client[kQueue][client[kRunningIdx]] + + /* istanbul ignore next: difficult to make a test case for */ + if (!request) { + return -1 + } + + assert(!this.upgrade) + assert(this.statusCode < 200) + + if (statusCode === 100) { + util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket))) + return -1 + } + + /* this can only happen if server is misbehaving */ + if (upgrade && !request.upgrade) { + util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket))) + return -1 + } + + assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS) + + this.statusCode = statusCode + this.shouldKeepAlive = ( + shouldKeepAlive || + // Override llhttp value which does not allow keepAlive for HEAD. + (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive') + ) + + if (this.statusCode >= 200) { + const bodyTimeout = request.bodyTimeout != null + ? request.bodyTimeout + : client[kBodyTimeout] + this.setTimeout(bodyTimeout, TIMEOUT_BODY) + } else if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + + if (request.method === 'CONNECT') { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } + + if (upgrade) { + assert(client[kRunning] === 1) + this.upgrade = true + return 2 + } + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + if (this.shouldKeepAlive && client[kPipelining]) { + const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null + + if (keepAliveTimeout != null) { + const timeout = Math.min( + keepAliveTimeout - client[kKeepAliveTimeoutThreshold], + client[kKeepAliveMaxTimeout] + ) + if (timeout <= 0) { + socket[kReset] = true + } else { + client[kKeepAliveTimeoutValue] = timeout + } + } else { + client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout] + } + } else { + // Stop more requests from being dispatched. + socket[kReset] = true + } + + const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false + + if (request.aborted) { + return -1 + } + + if (request.method === 'HEAD') { + return 1 + } + + if (statusCode < 200) { + return 1 + } + + if (socket[kBlocking]) { + socket[kBlocking] = false + resume(client) + } + + return pause ? 
constants.ERROR.PAUSED : 0 + } + + onBody (buf) { + const { client, socket, statusCode, maxResponseSize } = this + + if (socket.destroyed) { + return -1 + } + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert.strictEqual(this.timeoutType, TIMEOUT_BODY) + if (this.timeout) { + // istanbul ignore else: only for jest + if (this.timeout.refresh) { + this.timeout.refresh() + } + } + + assert(statusCode >= 200) + + if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) { + util.destroy(socket, new ResponseExceededMaxSizeError()) + return -1 + } + + this.bytesRead += buf.length + + if (request.onData(buf) === false) { + return constants.ERROR.PAUSED + } + } + + onMessageComplete () { + const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this + + if (socket.destroyed && (!statusCode || shouldKeepAlive)) { + return -1 + } + + if (upgrade) { + return + } + + const request = client[kQueue][client[kRunningIdx]] + assert(request) + + assert(statusCode >= 100) + + this.statusCode = null + this.statusText = '' + this.bytesRead = 0 + this.contentLength = '' + this.keepAlive = '' + this.connection = '' + + assert(this.headers.length % 2 === 0) + this.headers = [] + this.headersSize = 0 + + if (statusCode < 200) { + return + } + + /* istanbul ignore next: should be handled by llhttp? */ + if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) { + util.destroy(socket, new ResponseContentLengthMismatchError()) + return -1 + } + + request.onComplete(headers) + + client[kQueue][client[kRunningIdx]++] = null + + if (socket[kWriting]) { + assert.strictEqual(client[kRunning], 0) + // Response completed before request. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (!shouldKeepAlive) { + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (socket[kReset] && client[kRunning] === 0) { + // Destroy socket once all requests have completed. + // The request at the tail of the pipeline is the one + // that requested reset and no further requests should + // have been queued since then. + util.destroy(socket, new InformationalError('reset')) + return constants.ERROR.PAUSED + } else if (client[kPipelining] === 1) { + // We must wait a full event loop cycle to reuse this socket to make sure + // that non-spec compliant servers are not closing the connection even if they + // said they won't. 
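+        // Editor's note (not part of the bundled undici source): this is why,
+        // for pipelining === 1, resume is scheduled with setImmediate below
+        // instead of being called synchronously: the socket is only reused
+        // after a full event-loop turn, giving a server that closes the
+        // connection despite keep-alive a chance to do so first.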
+ setImmediate(resume, client) + } else { + resume(client) + } + } +} + +function onParserTimeout (parser) { + const { socket, timeoutType, client } = parser + + /* istanbul ignore else */ + if (timeoutType === TIMEOUT_HEADERS) { + if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { + assert(!parser.paused, 'cannot be paused while waiting for headers') + util.destroy(socket, new HeadersTimeoutError()) + } + } else if (timeoutType === TIMEOUT_BODY) { + if (!parser.paused) { + util.destroy(socket, new BodyTimeoutError()) + } + } else if (timeoutType === TIMEOUT_IDLE) { + assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]) + util.destroy(socket, new InformationalError('socket idle timeout')) + } +} + +function onSocketReadable () { + const { [kParser]: parser } = this + if (parser) { + parser.readMore() + } +} + +function onSocketError (err) { + const { [kClient]: client, [kParser]: parser } = this + + assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') + + if (client[kHTTPConnVersion] !== 'h2') { + // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded + // to the user. + if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so for as a valid response. + parser.onMessageComplete() + return + } + } + + this[kError] = err + + onError(this[kClient], err) +} + +function onError (client, err) { + if ( + client[kRunning] === 0 && + err.code !== 'UND_ERR_INFO' && + err.code !== 'UND_ERR_SOCKET' + ) { + // Error is not caused by running request and not a recoverable + // socket error. + + assert(client[kPendingIdx] === client[kRunningIdx]) + + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + assert(client[kSize] === 0) + } +} + +function onSocketEnd () { + const { [kParser]: parser, [kClient]: client } = this + + if (client[kHTTPConnVersion] !== 'h2') { + if (parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + return + } + } + + util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this))) +} + +function onSocketClose () { + const { [kClient]: client, [kParser]: parser } = this + + if (client[kHTTPConnVersion] === 'h1' && parser) { + if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) { + // We treat all incoming data so far as a valid response. + parser.onMessageComplete() + } + + this[kParser].destroy() + this[kParser] = null + } + + const err = this[kError] || new SocketError('closed', util.getSocketInfo(this)) + + client[kSocket] = null + + if (client.destroyed) { + assert(client[kPending] === 0) + + // Fail entire queue. + const requests = client[kQueue].splice(client[kRunningIdx]) + for (let i = 0; i < requests.length; i++) { + const request = requests[i] + errorRequest(client, request, err) + } + } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') { + // Fail head of pipeline. 
+ const request = client[kQueue][client[kRunningIdx]] + client[kQueue][client[kRunningIdx]++] = null + + errorRequest(client, request, err) + } + + client[kPendingIdx] = client[kRunningIdx] + + assert(client[kRunning] === 0) + + client.emit('disconnect', client[kUrl], [client], err) + + resume(client) +} + +async function connect (client) { + assert(!client[kConnecting]) + assert(!client[kSocket]) + + let { host, hostname, protocol, port } = client[kUrl] + + // Resolve ipv6 + if (hostname[0] === '[') { + const idx = hostname.indexOf(']') + + assert(idx !== -1) + const ip = hostname.substring(1, idx) + + assert(net.isIP(ip)) + hostname = ip + } + + client[kConnecting] = true + + if (channels.beforeConnect.hasSubscribers) { + channels.beforeConnect.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector] + }) + } + + try { + const socket = await new Promise((resolve, reject) => { + client[kConnector]({ + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, (err, socket) => { + if (err) { + reject(err) + } else { + resolve(socket) + } + }) + }) + + if (client.destroyed) { + util.destroy(socket.on('error', () => {}), new ClientDestroyedError()) + return + } + + client[kConnecting] = false + + assert(socket) + + const isH2 = socket.alpnProtocol === 'h2' + if (isH2) { + if (!h2ExperimentalWarned) { + h2ExperimentalWarned = true + process.emitWarning('H2 support is experimental, expect them to change at any time.', { + code: 'UNDICI-H2' + }) + } + + const session = http2.connect(client[kUrl], { + createConnection: () => socket, + peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams + }) + + client[kHTTPConnVersion] = 'h2' + session[kClient] = client + session[kSocket] = socket + session.on('error', onHttp2SessionError) + session.on('frameError', onHttp2FrameError) + session.on('end', onHttp2SessionEnd) + session.on('goaway', onHTTP2GoAway) + session.on('close', onSocketClose) + session.unref() + + client[kHTTP2Session] = session + socket[kHTTP2Session] = session + } else { + if (!llhttpInstance) { + llhttpInstance = await llhttpPromise + llhttpPromise = null + } + + socket[kNoRef] = false + socket[kWriting] = false + socket[kReset] = false + socket[kBlocking] = false + socket[kParser] = new Parser(client, socket, llhttpInstance) + } + + socket[kCounter] = 0 + socket[kMaxRequests] = client[kMaxRequests] + socket[kClient] = client + socket[kError] = null + + socket + .on('error', onSocketError) + .on('readable', onSocketReadable) + .on('end', onSocketEnd) + .on('close', onSocketClose) + + client[kSocket] = socket + + if (channels.connected.hasSubscribers) { + channels.connected.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + socket + }) + } + client.emit('connect', client[kUrl], [client]) + } catch (err) { + if (client.destroyed) { + return + } + + client[kConnecting] = false + + if (channels.connectError.hasSubscribers) { + channels.connectError.publish({ + connectParams: { + host, + hostname, + protocol, + port, + servername: client[kServerName], + localAddress: client[kLocalAddress] + }, + connector: client[kConnector], + error: err + }) + } + + if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { + assert(client[kRunning] === 0) + while (client[kPending] > 0 && 
client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { + const request = client[kQueue][client[kPendingIdx]++] + errorRequest(client, request, err) + } + } else { + onError(client, err) + } + + client.emit('connectionError', client[kUrl], [client], err) + } + + resume(client) +} + +function emitDrain (client) { + client[kNeedDrain] = 0 + client.emit('drain', client[kUrl], [client]) +} + +function resume (client, sync) { + if (client[kResuming] === 2) { + return + } + + client[kResuming] = 2 + + _resume(client, sync) + client[kResuming] = 0 + + if (client[kRunningIdx] > 256) { + client[kQueue].splice(0, client[kRunningIdx]) + client[kPendingIdx] -= client[kRunningIdx] + client[kRunningIdx] = 0 + } +} + +function _resume (client, sync) { + while (true) { + if (client.destroyed) { + assert(client[kPending] === 0) + return + } + + if (client[kClosedResolve] && !client[kSize]) { + client[kClosedResolve]() + client[kClosedResolve] = null + return + } + + const socket = client[kSocket] + + if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') { + if (client[kSize] === 0) { + if (!socket[kNoRef] && socket.unref) { + socket.unref() + socket[kNoRef] = true + } + } else if (socket[kNoRef] && socket.ref) { + socket.ref() + socket[kNoRef] = false + } + + if (client[kSize] === 0) { + if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { + socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE) + } + } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { + if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { + const request = client[kQueue][client[kRunningIdx]] + const headersTimeout = request.headersTimeout != null + ? request.headersTimeout + : client[kHeadersTimeout] + socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS) + } + } + } + + if (client[kBusy]) { + client[kNeedDrain] = 2 + } else if (client[kNeedDrain] === 2) { + if (sync) { + client[kNeedDrain] = 1 + process.nextTick(emitDrain, client) + } else { + emitDrain(client) + } + continue + } + + if (client[kPending] === 0) { + return + } + + if (client[kRunning] >= (client[kPipelining] || 1)) { + return + } + + const request = client[kQueue][client[kPendingIdx]] + + if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) { + if (client[kRunning] > 0) { + return + } + + client[kServerName] = request.servername + + if (socket && socket.servername !== request.servername) { + util.destroy(socket, new InformationalError('servername changed')) + return + } + } + + if (client[kConnecting]) { + return + } + + if (!socket && !client[kHTTP2Session]) { + connect(client) + return + } + + if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) { + return + } + + if (client[kRunning] > 0 && !request.idempotent) { + // Non-idempotent request cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } + + if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) { + // Don't dispatch an upgrade until all preceding requests have completed. + // A misbehaving server might upgrade the connection before all pipelined + // request has completed. + return + } + + if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 && + (util.isStream(request.body) || util.isAsyncIterable(request.body))) { + // Request with stream or iterator body can error while other requests + // are inflight and indirectly error those as well. 
+ // Ensure this doesn't happen by waiting for inflight + // to complete before dispatching. + + // Request with stream or iterator body cannot be retried. + // Ensure that no other requests are inflight and + // could cause failure. + return + } + + if (!request.aborted && write(client, request)) { + client[kPendingIdx]++ + } else { + client[kQueue].splice(client[kPendingIdx], 1) + } + } +} + +// https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2 +function shouldSendContentLength (method) { + return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT' +} + +function write (client, request) { + if (client[kHTTPConnVersion] === 'h2') { + writeH2(client, client[kHTTP2Session], request) + return + } + + const { body, method, path, host, upgrade, headers, blocking, reset } = request + + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 + + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. + + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) + + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. + body.read(0) + } + + const bodyLength = util.bodyLength(body) + + let contentLength = bodyLength + + if (contentLength === null) { + contentLength = request.contentLength + } + + if (contentLength === 0 && !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. + + contentLength = null + } + + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. + if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } + + process.emitWarning(new RequestContentLengthMismatchError()) + } + + const socket = client[kSocket] + + try { + request.onConnect((err) => { + if (request.aborted || request.completed) { + return + } + + errorRequest(client, request, err || new RequestAbortedError()) + + util.destroy(socket, new InformationalError('aborted')) + }) + } catch (err) { + errorRequest(client, request, err) + } + + if (request.aborted) { + return false + } + + if (method === 'HEAD') { + // https://github.com/mcollina/undici/issues/258 + // Close after a HEAD request to interop with misbehaving servers + // that may send a body in the response. + + socket[kReset] = true + } + + if (upgrade || method === 'CONNECT') { + // On CONNECT or upgrade, block pipeline from dispatching further + // requests on this connection. 
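+    // Editor's note (not part of the bundled undici source): setting
+    // socket[kReset] below is what makes _resume() stop dispatching further
+    // requests on this socket, and onMessageComplete later destroys the
+    // socket with an InformationalError('reset') once the pipeline drains.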
+ + socket[kReset] = true + } + + if (reset != null) { + socket[kReset] = reset + } + + if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) { + socket[kReset] = true + } + + if (blocking) { + socket[kBlocking] = true + } + + let header = `${method} ${path} HTTP/1.1\r\n` + + if (typeof host === 'string') { + header += `host: ${host}\r\n` + } else { + header += client[kHostHeader] + } + + if (upgrade) { + header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n` + } else if (client[kPipelining] && !socket[kReset]) { + header += 'connection: keep-alive\r\n' + } else { + header += 'connection: close\r\n' + } + + if (headers) { + header += headers + } + + if (channels.sendHeaders.hasSubscribers) { + channels.sendHeaders.publish({ request, headers: header, socket }) + } + + /* istanbul ignore else: assertion */ + if (!body || bodyLength === 0) { + if (contentLength === 0) { + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') + } else { + assert(contentLength === null, 'no body must not have content length') + socket.write(`${header}\r\n`, 'latin1') + } + request.onRequestSent() + } else if (util.isBuffer(body)) { + assert(contentLength === body.byteLength, 'buffer body must have content length') + + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(body) + socket.uncork() + request.onBodySent(body) + request.onRequestSent() + if (!expectsPayload) { + socket[kReset] = true + } + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload }) + } else { + writeBlob({ body, client, request, socket, contentLength, header, expectsPayload }) + } + } else if (util.isStream(body)) { + writeStream({ body, client, request, socket, contentLength, header, expectsPayload }) + } else if (util.isIterable(body)) { + writeIterable({ body, client, request, socket, contentLength, header, expectsPayload }) + } else { + assert(false) + } + + return true +} + +function writeH2 (client, session, request) { + const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request + + let headers + if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()) + else headers = reqHeaders + + if (upgrade) { + errorRequest(client, request, new Error('Upgrade not supported for H2')) + return false + } + + try { + // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event? + request.onConnect((err) => { + if (request.aborted || request.completed) { + return + } + + errorRequest(client, request, err || new RequestAbortedError()) + }) + } catch (err) { + errorRequest(client, request, err) + } + + if (request.aborted) { + return false + } + + /** @type {import('node:http2').ClientHttp2Stream} */ + let stream + const h2State = client[kHTTP2SessionState] + + headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost] + headers[HTTP2_HEADER_METHOD] = method + + if (method === 'CONNECT') { + session.ref() + // we are already connected, streams are pending, first request + // will create a new stream. 
We trigger a request to create the stream and wait until + // `ready` event is triggered + // We disabled endStream to allow the user to write to the stream + stream = session.request(headers, { endStream: false, signal }) + + if (stream.id && !stream.pending) { + request.onUpgrade(null, null, stream) + ++h2State.openStreams + } else { + stream.once('ready', () => { + request.onUpgrade(null, null, stream) + ++h2State.openStreams + }) + } + + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) session.unref() + }) + + return true + } + + // https://tools.ietf.org/html/rfc7540#section-8.3 + // :path and :scheme headers must be omited when sending CONNECT + + headers[HTTP2_HEADER_PATH] = path + headers[HTTP2_HEADER_SCHEME] = 'https' + + // https://tools.ietf.org/html/rfc7231#section-4.3.1 + // https://tools.ietf.org/html/rfc7231#section-4.3.2 + // https://tools.ietf.org/html/rfc7231#section-4.3.5 + + // Sending a payload body on a request that does not + // expect it can cause undefined behavior on some + // servers and corrupt connection state. Do not + // re-use the connection for further requests. + + const expectsPayload = ( + method === 'PUT' || + method === 'POST' || + method === 'PATCH' + ) + + if (body && typeof body.read === 'function') { + // Try to read EOF in order to get length. + body.read(0) + } + + let contentLength = util.bodyLength(body) + + if (contentLength == null) { + contentLength = request.contentLength + } + + if (contentLength === 0 || !expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD NOT send a Content-Length header field when + // the request message does not contain a payload body and the method + // semantics do not anticipate such a body. + + contentLength = null + } + + // https://github.com/nodejs/undici/issues/2046 + // A user agent may send a Content-Length header with 0 value, this should be allowed. 
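+  // Editor's note (not part of the bundled undici source): the guard below
+  // therefore only rejects the request when a caller-supplied
+  // request.contentLength disagrees with the measured body length, and only
+  // hard-fails under strictContentLength; otherwise it just emits a warning.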
+ if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { + if (client[kStrictContentLength]) { + errorRequest(client, request, new RequestContentLengthMismatchError()) + return false + } + + process.emitWarning(new RequestContentLengthMismatchError()) + } + + if (contentLength != null) { + assert(body, 'no body must not have content length') + headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}` + } + + session.ref() + + const shouldEndStream = method === 'GET' || method === 'HEAD' + if (expectContinue) { + headers[HTTP2_HEADER_EXPECT] = '100-continue' + stream = session.request(headers, { endStream: shouldEndStream, signal }) + + stream.once('continue', writeBodyH2) + } else { + stream = session.request(headers, { + endStream: shouldEndStream, + signal + }) + writeBodyH2() + } + + // Increment counter as we have new several streams open + ++h2State.openStreams + + stream.once('response', headers => { + const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers + + if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { + stream.pause() + } + }) + + stream.once('end', () => { + request.onComplete([]) + }) + + stream.on('data', (chunk) => { + if (request.onData(chunk) === false) { + stream.pause() + } + }) + + stream.once('close', () => { + h2State.openStreams -= 1 + // TODO(HTTP/2): unref only if current streams count is 0 + if (h2State.openStreams === 0) { + session.unref() + } + }) + + stream.once('error', function (err) { + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) + + stream.once('frameError', (type, code) => { + const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) + errorRequest(client, request, err) + + if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { + h2State.streams -= 1 + util.destroy(stream, err) + } + }) + + // stream.on('aborted', () => { + // // TODO(HTTP/2): Support aborted + // }) + + // stream.on('timeout', () => { + // // TODO(HTTP/2): Support timeout + // }) + + // stream.on('push', headers => { + // // TODO(HTTP/2): Suppor push + // }) + + // stream.on('trailers', headers => { + // // TODO(HTTP/2): Support trailers + // }) + + return true + + function writeBodyH2 () { + /* istanbul ignore else: assertion */ + if (!body) { + request.onRequestSent() + } else if (util.isBuffer(body)) { + assert(contentLength === body.byteLength, 'buffer body must have content length') + stream.cork() + stream.write(body) + stream.uncork() + stream.end() + request.onBodySent(body) + request.onRequestSent() + } else if (util.isBlobLike(body)) { + if (typeof body.stream === 'function') { + writeIterable({ + client, + request, + contentLength, + h2stream: stream, + expectsPayload, + body: body.stream(), + socket: client[kSocket], + header: '' + }) + } else { + writeBlob({ + body, + client, + request, + contentLength, + expectsPayload, + h2stream: stream, + header: '', + socket: client[kSocket] + }) + } + } else if (util.isStream(body)) { + writeStream({ + body, + client, + request, + contentLength, + expectsPayload, + socket: client[kSocket], + h2stream: stream, + header: '' + }) + } else if (util.isIterable(body)) { + writeIterable({ + body, + client, + request, + contentLength, + expectsPayload, + header: '', + h2stream: stream, + socket: 
client[kSocket] + }) + } else { + assert(false) + } + } +} + +function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined') + + if (client[kHTTPConnVersion] === 'h2') { + // For HTTP/2, is enough to pipe the stream + const pipe = pipeline( + body, + h2stream, + (err) => { + if (err) { + util.destroy(body, err) + util.destroy(h2stream, err) + } else { + request.onRequestSent() + } + } + ) + + pipe.on('data', onPipeData) + pipe.once('end', () => { + pipe.removeListener('data', onPipeData) + util.destroy(pipe) + }) + + function onPipeData (chunk) { + request.onBodySent(chunk) + } + + return + } + + let finished = false + + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + + const onData = function (chunk) { + if (finished) { + return + } + + try { + if (!writer.write(chunk) && this.pause) { + this.pause() + } + } catch (err) { + util.destroy(this, err) + } + } + const onDrain = function () { + if (finished) { + return + } + + if (body.resume) { + body.resume() + } + } + const onAbort = function () { + if (finished) { + return + } + const err = new RequestAbortedError() + queueMicrotask(() => onFinished(err)) + } + const onFinished = function (err) { + if (finished) { + return + } + + finished = true + + assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1)) + + socket + .off('drain', onDrain) + .off('error', onFinished) + + body + .removeListener('data', onData) + .removeListener('end', onFinished) + .removeListener('error', onFinished) + .removeListener('close', onAbort) + + if (!err) { + try { + writer.end() + } catch (er) { + err = er + } + } + + writer.destroy(err) + + if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) { + util.destroy(body, err) + } else { + util.destroy(body) + } + } + + body + .on('data', onData) + .on('end', onFinished) + .on('error', onFinished) + .on('close', onAbort) + + if (body.resume) { + body.resume() + } + + socket + .on('drain', onDrain) + .on('error', onFinished) +} + +async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength === body.size, 'blob body must have content length') + + const isH2 = client[kHTTPConnVersion] === 'h2' + try { + if (contentLength != null && contentLength !== body.size) { + throw new RequestContentLengthMismatchError() + } + + const buffer = Buffer.from(await body.arrayBuffer()) + + if (isH2) { + h2stream.cork() + h2stream.write(buffer) + h2stream.uncork() + } else { + socket.cork() + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + socket.write(buffer) + socket.uncork() + } + + request.onBodySent(buffer) + request.onRequestSent() + + if (!expectsPayload) { + socket[kReset] = true + } + + resume(client) + } catch (err) { + util.destroy(isH2 ? 
h2stream : socket, err) + } +} + +async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { + assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined') + + let callback = null + function onDrain () { + if (callback) { + const cb = callback + callback = null + cb() + } + } + + const waitForDrain = () => new Promise((resolve, reject) => { + assert(callback === null) + + if (socket[kError]) { + reject(socket[kError]) + } else { + callback = resolve + } + }) + + if (client[kHTTPConnVersion] === 'h2') { + h2stream + .on('close', onDrain) + .on('drain', onDrain) + + try { + // It's up to the user to somehow abort the async iterable. + for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] + } + + const res = h2stream.write(chunk) + request.onBodySent(chunk) + if (!res) { + await waitForDrain() + } + } + } catch (err) { + h2stream.destroy(err) + } finally { + request.onRequestSent() + h2stream.end() + h2stream + .off('close', onDrain) + .off('drain', onDrain) + } + + return + } + + socket + .on('close', onDrain) + .on('drain', onDrain) + + const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) + try { + // It's up to the user to somehow abort the async iterable. + for await (const chunk of body) { + if (socket[kError]) { + throw socket[kError] + } + + if (!writer.write(chunk)) { + await waitForDrain() + } + } + + writer.end() + } catch (err) { + writer.destroy(err) + } finally { + socket + .off('close', onDrain) + .off('drain', onDrain) + } +} + +class AsyncWriter { + constructor ({ socket, request, contentLength, client, expectsPayload, header }) { + this.socket = socket + this.request = request + this.contentLength = contentLength + this.client = client + this.bytesWritten = 0 + this.expectsPayload = expectsPayload + this.header = header + + socket[kWriting] = true + } + + write (chunk) { + const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this + + if (socket[kError]) { + throw socket[kError] + } + + if (socket.destroyed) { + return false + } + + const len = Buffer.byteLength(chunk) + if (!len) { + return true + } + + // We should defer writing chunks. 
+ if (contentLength !== null && bytesWritten + len > contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } + + process.emitWarning(new RequestContentLengthMismatchError()) + } + + socket.cork() + + if (bytesWritten === 0) { + if (!expectsPayload) { + socket[kReset] = true + } + + if (contentLength === null) { + socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1') + } else { + socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') + } + } + + if (contentLength === null) { + socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1') + } + + this.bytesWritten += len + + const ret = socket.write(chunk) + + socket.uncork() + + request.onBodySent(chunk) + + if (!ret) { + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() + } + } + } + + return ret + } + + end () { + const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this + request.onRequestSent() + + socket[kWriting] = false + + if (socket[kError]) { + throw socket[kError] + } + + if (socket.destroyed) { + return + } + + if (bytesWritten === 0) { + if (expectsPayload) { + // https://tools.ietf.org/html/rfc7230#section-3.3.2 + // A user agent SHOULD send a Content-Length in a request message when + // no Transfer-Encoding is sent and the request method defines a meaning + // for an enclosed payload body. + + socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') + } else { + socket.write(`${header}\r\n`, 'latin1') + } + } else if (contentLength === null) { + socket.write('\r\n0\r\n\r\n', 'latin1') + } + + if (contentLength !== null && bytesWritten !== contentLength) { + if (client[kStrictContentLength]) { + throw new RequestContentLengthMismatchError() + } else { + process.emitWarning(new RequestContentLengthMismatchError()) + } + } + + if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { + // istanbul ignore else: only for jest + if (socket[kParser].timeout.refresh) { + socket[kParser].timeout.refresh() + } + } + + resume(client) + } + + destroy (err) { + const { socket, client } = this + + socket[kWriting] = false + + if (err) { + assert(client[kRunning] <= 1, 'pipeline should only contain this request') + util.destroy(socket, err) + } + } +} + +function errorRequest (client, request, err) { + try { + request.onError(err) + assert(request.aborted) + } catch (err) { + client.emit('error', err) + } +} + +module.exports = Client + + +/***/ }), + +/***/ 3194: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/* istanbul ignore file: only for Node 12 */ + +const { kConnected, kSize } = __nccwpck_require__(6443) + +class CompatWeakRef { + constructor (value) { + this.value = value + } + + deref () { + return this.value[kConnected] === 0 && this.value[kSize] === 0 + ? 
undefined + : this.value + } +} + +class CompatFinalizer { + constructor (finalizer) { + this.finalizer = finalizer + } + + register (dispatcher, key) { + if (dispatcher.on) { + dispatcher.on('disconnect', () => { + if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) { + this.finalizer(key) + } + }) + } + } +} + +module.exports = function () { + // FIXME: remove workaround when the Node bug is fixed + // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 + if (process.env.NODE_V8_COVERAGE) { + return { + WeakRef: CompatWeakRef, + FinalizationRegistry: CompatFinalizer + } + } + return { + WeakRef: global.WeakRef || CompatWeakRef, + FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer + } +} + + +/***/ }), + +/***/ 9237: +/***/ ((module) => { + +"use strict"; + + +// https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size +const maxAttributeValueSize = 1024 + +// https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size +const maxNameValuePairSize = 4096 + +module.exports = { + maxAttributeValueSize, + maxNameValuePairSize +} + + +/***/ }), + +/***/ 3168: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { parseSetCookie } = __nccwpck_require__(8915) +const { stringify, getHeadersList } = __nccwpck_require__(3834) +const { webidl } = __nccwpck_require__(4222) +const { Headers } = __nccwpck_require__(6349) + +/** + * @typedef {Object} Cookie + * @property {string} name + * @property {string} value + * @property {Date|number|undefined} expires + * @property {number|undefined} maxAge + * @property {string|undefined} domain + * @property {string|undefined} path + * @property {boolean|undefined} secure + * @property {boolean|undefined} httpOnly + * @property {'Strict'|'Lax'|'None'} sameSite + * @property {string[]} unparsed + */ + +/** + * @param {Headers} headers + * @returns {Record} + */ +function getCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + const cookie = headers.get('cookie') + const out = {} + + if (!cookie) { + return out + } + + for (const piece of cookie.split(';')) { + const [name, ...value] = piece.split('=') + + out[name.trim()] = value.join('=') + } + + return out +} + +/** + * @param {Headers} headers + * @param {string} name + * @param {{ path?: string, domain?: string }|undefined} attributes + * @returns {void} + */ +function deleteCookie (headers, name, attributes) { + webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + name = webidl.converters.DOMString(name) + attributes = webidl.converters.DeleteCookieAttributes(attributes) + + // Matches behavior of + // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278 + setCookie(headers, { + name, + value: '', + expires: new Date(0), + ...attributes + }) +} + +/** + * @param {Headers} headers + * @returns {Cookie[]} + */ +function getSetCookies (headers) { + webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + const cookies = getHeadersList(headers).cookies + + if (!cookies) { + return [] + } + + // In older versions of undici, cookies is a list of name:value. + return cookies.map((pair) => parseSetCookie(Array.isArray(pair) ? 
pair[1] : pair)) +} + +/** + * @param {Headers} headers + * @param {Cookie} cookie + * @returns {void} + */ +function setCookie (headers, cookie) { + webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' }) + + webidl.brandCheck(headers, Headers, { strict: false }) + + cookie = webidl.converters.Cookie(cookie) + + const str = stringify(cookie) + + if (str) { + headers.append('Set-Cookie', stringify(cookie)) + } +} + +webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([ + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + } +]) + +webidl.converters.Cookie = webidl.dictionaryConverter([ + { + converter: webidl.converters.DOMString, + key: 'name' + }, + { + converter: webidl.converters.DOMString, + key: 'value' + }, + { + converter: webidl.nullableConverter((value) => { + if (typeof value === 'number') { + return webidl.converters['unsigned long long'](value) + } + + return new Date(value) + }), + key: 'expires', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters['long long']), + key: 'maxAge', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'domain', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.DOMString), + key: 'path', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'secure', + defaultValue: null + }, + { + converter: webidl.nullableConverter(webidl.converters.boolean), + key: 'httpOnly', + defaultValue: null + }, + { + converter: webidl.converters.USVString, + key: 'sameSite', + allowedValues: ['Strict', 'Lax', 'None'] + }, + { + converter: webidl.sequenceConverter(webidl.converters.DOMString), + key: 'unparsed', + defaultValue: [] + } +]) + +module.exports = { + getCookies, + deleteCookie, + getSetCookies, + setCookie +} + + +/***/ }), + +/***/ 8915: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { maxNameValuePairSize, maxAttributeValueSize } = __nccwpck_require__(9237) +const { isCTLExcludingHtab } = __nccwpck_require__(3834) +const { collectASequenceOfCodePointsFast } = __nccwpck_require__(4322) +const assert = __nccwpck_require__(2613) + +/** + * @description Parses the field-value attributes of a set-cookie header string. + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} header + * @returns if the header is invalid, null will be returned + */ +function parseSetCookie (header) { + // 1. If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F + // character (CTL characters excluding HTAB): Abort these steps and + // ignore the set-cookie-string entirely. + if (isCTLExcludingHtab(header)) { + return null + } + + let nameValuePair = '' + let unparsedAttributes = '' + let name = '' + let value = '' + + // 2. If the set-cookie-string contains a %x3B (";") character: + if (header.includes(';')) { + // 1. The name-value-pair string consists of the characters up to, + // but not including, the first %x3B (";"), and the unparsed- + // attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question). 
+ const position = { position: 0 } + + nameValuePair = collectASequenceOfCodePointsFast(';', header, position) + unparsedAttributes = header.slice(position.position) + } else { + // Otherwise: + + // 1. The name-value-pair string consists of all the characters + // contained in the set-cookie-string, and the unparsed- + // attributes is the empty string. + nameValuePair = header + } + + // 3. If the name-value-pair string lacks a %x3D ("=") character, then + // the name string is empty, and the value string is the value of + // name-value-pair. + if (!nameValuePair.includes('=')) { + value = nameValuePair + } else { + // Otherwise, the name string consists of the characters up to, but + // not including, the first %x3D ("=") character, and the (possibly + // empty) value string consists of the characters after the first + // %x3D ("=") character. + const position = { position: 0 } + name = collectASequenceOfCodePointsFast( + '=', + nameValuePair, + position + ) + value = nameValuePair.slice(position.position + 1) + } + + // 4. Remove any leading or trailing WSP characters from the name + // string and the value string. + name = name.trim() + value = value.trim() + + // 5. If the sum of the lengths of the name string and the value string + // is more than 4096 octets, abort these steps and ignore the set- + // cookie-string entirely. + if (name.length + value.length > maxNameValuePairSize) { + return null + } + + // 6. The cookie-name is the name string, and the cookie-value is the + // value string. + return { + name, value, ...parseUnparsedAttributes(unparsedAttributes) + } +} + +/** + * Parses the remaining attributes of a set-cookie header + * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 + * @param {string} unparsedAttributes + * @param {[Object.]={}} cookieAttributeList + */ +function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) { + // 1. If the unparsed-attributes string is empty, skip the rest of + // these steps. + if (unparsedAttributes.length === 0) { + return cookieAttributeList + } + + // 2. Discard the first character of the unparsed-attributes (which + // will be a %x3B (";") character). + assert(unparsedAttributes[0] === ';') + unparsedAttributes = unparsedAttributes.slice(1) + + let cookieAv = '' + + // 3. If the remaining unparsed-attributes contains a %x3B (";") + // character: + if (unparsedAttributes.includes(';')) { + // 1. Consume the characters of the unparsed-attributes up to, but + // not including, the first %x3B (";") character. + cookieAv = collectASequenceOfCodePointsFast( + ';', + unparsedAttributes, + { position: 0 } + ) + unparsedAttributes = unparsedAttributes.slice(cookieAv.length) + } else { + // Otherwise: + + // 1. Consume the remainder of the unparsed-attributes. + cookieAv = unparsedAttributes + unparsedAttributes = '' + } + + // Let the cookie-av string be the characters consumed in this step. + + let attributeName = '' + let attributeValue = '' + + // 4. If the cookie-av string contains a %x3D ("=") character: + if (cookieAv.includes('=')) { + // 1. The (possibly empty) attribute-name string consists of the + // characters up to, but not including, the first %x3D ("=") + // character, and the (possibly empty) attribute-value string + // consists of the characters after the first %x3D ("=") + // character. 
+ const position = { position: 0 } + + attributeName = collectASequenceOfCodePointsFast( + '=', + cookieAv, + position + ) + attributeValue = cookieAv.slice(position.position + 1) + } else { + // Otherwise: + + // 1. The attribute-name string consists of the entire cookie-av + // string, and the attribute-value string is empty. + attributeName = cookieAv + } + + // 5. Remove any leading or trailing WSP characters from the attribute- + // name string and the attribute-value string. + attributeName = attributeName.trim() + attributeValue = attributeValue.trim() + + // 6. If the attribute-value is longer than 1024 octets, ignore the + // cookie-av string and return to Step 1 of this algorithm. + if (attributeValue.length > maxAttributeValueSize) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 7. Process the attribute-name and attribute-value according to the + // requirements in the following subsections. (Notice that + // attributes with unrecognized attribute-names are ignored.) + const attributeNameLowercase = attributeName.toLowerCase() + + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1 + // If the attribute-name case-insensitively matches the string + // "Expires", the user agent MUST process the cookie-av as follows. + if (attributeNameLowercase === 'expires') { + // 1. Let the expiry-time be the result of parsing the attribute-value + // as cookie-date (see Section 5.1.1). + const expiryTime = new Date(attributeValue) + + // 2. If the attribute-value failed to parse as a cookie date, ignore + // the cookie-av. + + cookieAttributeList.expires = expiryTime + } else if (attributeNameLowercase === 'max-age') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2 + // If the attribute-name case-insensitively matches the string "Max- + // Age", the user agent MUST process the cookie-av as follows. + + // 1. If the first character of the attribute-value is not a DIGIT or a + // "-" character, ignore the cookie-av. + const charCode = attributeValue.charCodeAt(0) + + if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 2. If the remainder of attribute-value contains a non-DIGIT + // character, ignore the cookie-av. + if (!/^\d+$/.test(attributeValue)) { + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) + } + + // 3. Let delta-seconds be the attribute-value converted to an integer. + const deltaSeconds = Number(attributeValue) + + // 4. Let cookie-age-limit be the maximum age of the cookie (which + // SHOULD be 400 days or less, see Section 4.1.2.2). + + // 5. Set delta-seconds to the smaller of its present value and cookie- + // age-limit. + // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs) + + // 6. If delta-seconds is less than or equal to zero (0), let expiry- + // time be the earliest representable date and time. Otherwise, let + // the expiry-time be the current date and time plus delta-seconds + // seconds. + // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds + + // 7. Append an attribute to the cookie-attribute-list with an + // attribute-name of Max-Age and an attribute-value of expiry-time. 
+ cookieAttributeList.maxAge = deltaSeconds + } else if (attributeNameLowercase === 'domain') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3 + // If the attribute-name case-insensitively matches the string "Domain", + // the user agent MUST process the cookie-av as follows. + + // 1. Let cookie-domain be the attribute-value. + let cookieDomain = attributeValue + + // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be + // cookie-domain without its leading %x2E ("."). + if (cookieDomain[0] === '.') { + cookieDomain = cookieDomain.slice(1) + } + + // 3. Convert the cookie-domain to lower case. + cookieDomain = cookieDomain.toLowerCase() + + // 4. Append an attribute to the cookie-attribute-list with an + // attribute-name of Domain and an attribute-value of cookie-domain. + cookieAttributeList.domain = cookieDomain + } else if (attributeNameLowercase === 'path') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4 + // If the attribute-name case-insensitively matches the string "Path", + // the user agent MUST process the cookie-av as follows. + + // 1. If the attribute-value is empty or if the first character of the + // attribute-value is not %x2F ("/"): + let cookiePath = '' + if (attributeValue.length === 0 || attributeValue[0] !== '/') { + // 1. Let cookie-path be the default-path. + cookiePath = '/' + } else { + // Otherwise: + + // 1. Let cookie-path be the attribute-value. + cookiePath = attributeValue + } + + // 2. Append an attribute to the cookie-attribute-list with an + // attribute-name of Path and an attribute-value of cookie-path. + cookieAttributeList.path = cookiePath + } else if (attributeNameLowercase === 'secure') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5 + // If the attribute-name case-insensitively matches the string "Secure", + // the user agent MUST append an attribute to the cookie-attribute-list + // with an attribute-name of Secure and an empty attribute-value. + + cookieAttributeList.secure = true + } else if (attributeNameLowercase === 'httponly') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6 + // If the attribute-name case-insensitively matches the string + // "HttpOnly", the user agent MUST append an attribute to the cookie- + // attribute-list with an attribute-name of HttpOnly and an empty + // attribute-value. + + cookieAttributeList.httpOnly = true + } else if (attributeNameLowercase === 'samesite') { + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7 + // If the attribute-name case-insensitively matches the string + // "SameSite", the user agent MUST process the cookie-av as follows: + + // 1. Let enforcement be "Default". + let enforcement = 'Default' + + const attributeValueLowercase = attributeValue.toLowerCase() + // 2. If cookie-av's attribute-value is a case-insensitive match for + // "None", set enforcement to "None". + if (attributeValueLowercase.includes('none')) { + enforcement = 'None' + } + + // 3. If cookie-av's attribute-value is a case-insensitive match for + // "Strict", set enforcement to "Strict". + if (attributeValueLowercase.includes('strict')) { + enforcement = 'Strict' + } + + // 4. If cookie-av's attribute-value is a case-insensitive match for + // "Lax", set enforcement to "Lax". + if (attributeValueLowercase.includes('lax')) { + enforcement = 'Lax' + } + + // 5. 
Append an attribute to the cookie-attribute-list with an + // attribute-name of "SameSite" and an attribute-value of + // enforcement. + cookieAttributeList.sameSite = enforcement + } else { + cookieAttributeList.unparsed ??= [] + + cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`) + } + + // 8. Return to Step 1 of this algorithm. + return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) +} + +module.exports = { + parseSetCookie, + parseUnparsedAttributes +} + + +/***/ }), + +/***/ 3834: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const assert = __nccwpck_require__(2613) +const { kHeadersList } = __nccwpck_require__(6443) + +function isCTLExcludingHtab (value) { + if (value.length === 0) { + return false + } + + for (const char of value) { + const code = char.charCodeAt(0) + + if ( + (code >= 0x00 || code <= 0x08) || + (code >= 0x0A || code <= 0x1F) || + code === 0x7F + ) { + return false + } + } +} + +/** + CHAR = + token = 1* + separators = "(" | ")" | "<" | ">" | "@" + | "," | ";" | ":" | "\" | <"> + | "/" | "[" | "]" | "?" | "=" + | "{" | "}" | SP | HT + * @param {string} name + */ +function validateCookieName (name) { + for (const char of name) { + const code = char.charCodeAt(0) + + if ( + (code <= 0x20 || code > 0x7F) || + char === '(' || + char === ')' || + char === '>' || + char === '<' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' || + char === '=' || + char === '{' || + char === '}' + ) { + throw new Error('Invalid cookie name') + } + } +} + +/** + cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) + cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E + ; US-ASCII characters excluding CTLs, + ; whitespace DQUOTE, comma, semicolon, + ; and backslash + * @param {string} value + */ +function validateCookieValue (value) { + for (const char of value) { + const code = char.charCodeAt(0) + + if ( + code < 0x21 || // exclude CTLs (0-31) + code === 0x22 || + code === 0x2C || + code === 0x3B || + code === 0x5C || + code > 0x7E // non-ascii + ) { + throw new Error('Invalid header value') + } + } +} + +/** + * path-value = + * @param {string} path + */ +function validateCookiePath (path) { + for (const char of path) { + const code = char.charCodeAt(0) + + if (code < 0x21 || char === ';') { + throw new Error('Invalid cookie path') + } + } +} + +/** + * I have no idea why these values aren't allowed to be honest, + * but Deno tests these. 
- Khafra + * @param {string} domain + */ +function validateCookieDomain (domain) { + if ( + domain.startsWith('-') || + domain.endsWith('.') || + domain.endsWith('-') + ) { + throw new Error('Invalid cookie domain') + } +} + +/** + * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1 + * @param {number|Date} date + IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT + ; fixed length/zone/capitalization subset of the format + ; see Section 3.3 of [RFC5322] + + day-name = %x4D.6F.6E ; "Mon", case-sensitive + / %x54.75.65 ; "Tue", case-sensitive + / %x57.65.64 ; "Wed", case-sensitive + / %x54.68.75 ; "Thu", case-sensitive + / %x46.72.69 ; "Fri", case-sensitive + / %x53.61.74 ; "Sat", case-sensitive + / %x53.75.6E ; "Sun", case-sensitive + date1 = day SP month SP year + ; e.g., 02 Jun 1982 + + day = 2DIGIT + month = %x4A.61.6E ; "Jan", case-sensitive + / %x46.65.62 ; "Feb", case-sensitive + / %x4D.61.72 ; "Mar", case-sensitive + / %x41.70.72 ; "Apr", case-sensitive + / %x4D.61.79 ; "May", case-sensitive + / %x4A.75.6E ; "Jun", case-sensitive + / %x4A.75.6C ; "Jul", case-sensitive + / %x41.75.67 ; "Aug", case-sensitive + / %x53.65.70 ; "Sep", case-sensitive + / %x4F.63.74 ; "Oct", case-sensitive + / %x4E.6F.76 ; "Nov", case-sensitive + / %x44.65.63 ; "Dec", case-sensitive + year = 4DIGIT + + GMT = %x47.4D.54 ; "GMT", case-sensitive + + time-of-day = hour ":" minute ":" second + ; 00:00:00 - 23:59:60 (leap second) + + hour = 2DIGIT + minute = 2DIGIT + second = 2DIGIT + */ +function toIMFDate (date) { + if (typeof date === 'number') { + date = new Date(date) + } + + const days = [ + 'Sun', 'Mon', 'Tue', 'Wed', + 'Thu', 'Fri', 'Sat' + ] + + const months = [ + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' + ] + + const dayName = days[date.getUTCDay()] + const day = date.getUTCDate().toString().padStart(2, '0') + const month = months[date.getUTCMonth()] + const year = date.getUTCFullYear() + const hour = date.getUTCHours().toString().padStart(2, '0') + const minute = date.getUTCMinutes().toString().padStart(2, '0') + const second = date.getUTCSeconds().toString().padStart(2, '0') + + return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT` +} + +/** + max-age-av = "Max-Age=" non-zero-digit *DIGIT + ; In practice, both expires-av and max-age-av + ; are limited to dates representable by the + ; user agent. 
+ * @param {number} maxAge + */ +function validateCookieMaxAge (maxAge) { + if (maxAge < 0) { + throw new Error('Invalid cookie max-age') + } +} + +/** + * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1 + * @param {import('./index').Cookie} cookie + */ +function stringify (cookie) { + if (cookie.name.length === 0) { + return null + } + + validateCookieName(cookie.name) + validateCookieValue(cookie.value) + + const out = [`${cookie.name}=${cookie.value}`] + + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1 + // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2 + if (cookie.name.startsWith('__Secure-')) { + cookie.secure = true + } + + if (cookie.name.startsWith('__Host-')) { + cookie.secure = true + cookie.domain = null + cookie.path = '/' + } + + if (cookie.secure) { + out.push('Secure') + } + + if (cookie.httpOnly) { + out.push('HttpOnly') + } + + if (typeof cookie.maxAge === 'number') { + validateCookieMaxAge(cookie.maxAge) + out.push(`Max-Age=${cookie.maxAge}`) + } + + if (cookie.domain) { + validateCookieDomain(cookie.domain) + out.push(`Domain=${cookie.domain}`) + } + + if (cookie.path) { + validateCookiePath(cookie.path) + out.push(`Path=${cookie.path}`) + } + + if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') { + out.push(`Expires=${toIMFDate(cookie.expires)}`) + } + + if (cookie.sameSite) { + out.push(`SameSite=${cookie.sameSite}`) + } + + for (const part of cookie.unparsed) { + if (!part.includes('=')) { + throw new Error('Invalid unparsed') + } + + const [key, ...value] = part.split('=') + + out.push(`${key.trim()}=${value.join('=')}`) + } + + return out.join('; ') +} + +let kHeadersListNode + +function getHeadersList (headers) { + if (headers[kHeadersList]) { + return headers[kHeadersList] + } + + if (!kHeadersListNode) { + kHeadersListNode = Object.getOwnPropertySymbols(headers).find( + (symbol) => symbol.description === 'headers list' + ) + + assert(kHeadersListNode, 'Headers cannot be parsed') + } + + const headersList = headers[kHeadersListNode] + assert(headersList) + + return headersList +} + +module.exports = { + isCTLExcludingHtab, + stringify, + getHeadersList +} + + +/***/ }), + +/***/ 9136: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const net = __nccwpck_require__(9278) +const assert = __nccwpck_require__(2613) +const util = __nccwpck_require__(3440) +const { InvalidArgumentError, ConnectTimeoutError } = __nccwpck_require__(8707) + +let tls // include tls conditionally since it is not always available + +// TODO: session re-use does not wait for the first +// connection to resolve the session and might therefore +// resolve the same servername multiple times even when +// re-use is enabled. 
+ +let SessionCache +// FIXME: remove workaround when the Node bug is fixed +// https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 +if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) { + SessionCache = class WeakSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + this._sessionRegistry = new global.FinalizationRegistry((key) => { + if (this._sessionCache.size < this._maxCachedSessions) { + return + } + + const ref = this._sessionCache.get(key) + if (ref !== undefined && ref.deref() === undefined) { + this._sessionCache.delete(key) + } + }) + } + + get (sessionKey) { + const ref = this._sessionCache.get(sessionKey) + return ref ? ref.deref() : null + } + + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } + + this._sessionCache.set(sessionKey, new WeakRef(session)) + this._sessionRegistry.register(session, sessionKey) + } + } +} else { + SessionCache = class SimpleSessionCache { + constructor (maxCachedSessions) { + this._maxCachedSessions = maxCachedSessions + this._sessionCache = new Map() + } + + get (sessionKey) { + return this._sessionCache.get(sessionKey) + } + + set (sessionKey, session) { + if (this._maxCachedSessions === 0) { + return + } + + if (this._sessionCache.size >= this._maxCachedSessions) { + // remove the oldest session + const { value: oldestKey } = this._sessionCache.keys().next() + this._sessionCache.delete(oldestKey) + } + + this._sessionCache.set(sessionKey, session) + } + } +} + +function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) { + if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) { + throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero') + } + + const options = { path: socketPath, ...opts } + const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions) + timeout = timeout == null ? 10e3 : timeout + allowH2 = allowH2 != null ? allowH2 : false + return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) { + let socket + if (protocol === 'https:') { + if (!tls) { + tls = __nccwpck_require__(4756) + } + servername = servername || options.servername || util.getServerName(host) || null + + const sessionKey = servername || hostname + const session = sessionCache.get(sessionKey) || null + + assert(sessionKey) + + socket = tls.connect({ + highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... + ...options, + servername, + session, + localAddress, + // TODO(HTTP/2): Add support for h2c + ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'], + socket: httpSocket, // upgrade socket connection + port: port || 443, + host: hostname + }) + + socket + .on('session', function (session) { + // TODO (fix): Can a session become invalid once established? Don't think so? + sessionCache.set(sessionKey, session) + }) + } else { + assert(!httpSocket, 'httpSocket can only be sent on TLS update') + socket = net.connect({ + highWaterMark: 64 * 1024, // Same as nodejs fs streams. + ...options, + localAddress, + port: port || 80, + host: hostname + }) + } + + // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket + if (options.keepAlive == null || options.keepAlive) { + const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 
60e3 : options.keepAliveInitialDelay + socket.setKeepAlive(true, keepAliveInitialDelay) + } + + const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) + + socket + .setNoDelay(true) + .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () { + cancelTimeout() + + if (callback) { + const cb = callback + callback = null + cb(null, this) + } + }) + .on('error', function (err) { + cancelTimeout() + + if (callback) { + const cb = callback + callback = null + cb(err) + } + }) + + return socket + } +} + +function setupTimeout (onConnectTimeout, timeout) { + if (!timeout) { + return () => {} + } + + let s1 = null + let s2 = null + const timeoutId = setTimeout(() => { + // setImmediate is added to make sure that we priotorise socket error events over timeouts + s1 = setImmediate(() => { + if (process.platform === 'win32') { + // Windows needs an extra setImmediate probably due to implementation differences in the socket logic + s2 = setImmediate(() => onConnectTimeout()) + } else { + onConnectTimeout() + } + }) + }, timeout) + return () => { + clearTimeout(timeoutId) + clearImmediate(s1) + clearImmediate(s2) + } +} + +function onConnectTimeout (socket) { + util.destroy(socket, new ConnectTimeoutError()) +} + +module.exports = buildConnector + + +/***/ }), + +/***/ 735: +/***/ ((module) => { + +"use strict"; + + +/** @type {Record} */ +const headerNameLowerCasedRecord = {} + +// https://developer.mozilla.org/docs/Web/HTTP/Headers +const wellknownHeaderNames = [ + 'Accept', + 'Accept-Encoding', + 'Accept-Language', + 'Accept-Ranges', + 'Access-Control-Allow-Credentials', + 'Access-Control-Allow-Headers', + 'Access-Control-Allow-Methods', + 'Access-Control-Allow-Origin', + 'Access-Control-Expose-Headers', + 'Access-Control-Max-Age', + 'Access-Control-Request-Headers', + 'Access-Control-Request-Method', + 'Age', + 'Allow', + 'Alt-Svc', + 'Alt-Used', + 'Authorization', + 'Cache-Control', + 'Clear-Site-Data', + 'Connection', + 'Content-Disposition', + 'Content-Encoding', + 'Content-Language', + 'Content-Length', + 'Content-Location', + 'Content-Range', + 'Content-Security-Policy', + 'Content-Security-Policy-Report-Only', + 'Content-Type', + 'Cookie', + 'Cross-Origin-Embedder-Policy', + 'Cross-Origin-Opener-Policy', + 'Cross-Origin-Resource-Policy', + 'Date', + 'Device-Memory', + 'Downlink', + 'ECT', + 'ETag', + 'Expect', + 'Expect-CT', + 'Expires', + 'Forwarded', + 'From', + 'Host', + 'If-Match', + 'If-Modified-Since', + 'If-None-Match', + 'If-Range', + 'If-Unmodified-Since', + 'Keep-Alive', + 'Last-Modified', + 'Link', + 'Location', + 'Max-Forwards', + 'Origin', + 'Permissions-Policy', + 'Pragma', + 'Proxy-Authenticate', + 'Proxy-Authorization', + 'RTT', + 'Range', + 'Referer', + 'Referrer-Policy', + 'Refresh', + 'Retry-After', + 'Sec-WebSocket-Accept', + 'Sec-WebSocket-Extensions', + 'Sec-WebSocket-Key', + 'Sec-WebSocket-Protocol', + 'Sec-WebSocket-Version', + 'Server', + 'Server-Timing', + 'Service-Worker-Allowed', + 'Service-Worker-Navigation-Preload', + 'Set-Cookie', + 'SourceMap', + 'Strict-Transport-Security', + 'Supports-Loading-Mode', + 'TE', + 'Timing-Allow-Origin', + 'Trailer', + 'Transfer-Encoding', + 'Upgrade', + 'Upgrade-Insecure-Requests', + 'User-Agent', + 'Vary', + 'Via', + 'WWW-Authenticate', + 'X-Content-Type-Options', + 'X-DNS-Prefetch-Control', + 'X-Frame-Options', + 'X-Permitted-Cross-Domain-Policies', + 'X-Powered-By', + 'X-Requested-With', + 'X-XSS-Protection' +] + +for (let i = 0; i < wellknownHeaderNames.length; ++i) { + const key = 
wellknownHeaderNames[i] + const lowerCasedKey = key.toLowerCase() + headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = + lowerCasedKey +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. +Object.setPrototypeOf(headerNameLowerCasedRecord, null) + +module.exports = { + wellknownHeaderNames, + headerNameLowerCasedRecord +} + + +/***/ }), + +/***/ 8707: +/***/ ((module) => { + +"use strict"; + + +class UndiciError extends Error { + constructor (message) { + super(message) + this.name = 'UndiciError' + this.code = 'UND_ERR' + } +} + +class ConnectTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ConnectTimeoutError) + this.name = 'ConnectTimeoutError' + this.message = message || 'Connect Timeout Error' + this.code = 'UND_ERR_CONNECT_TIMEOUT' + } +} + +class HeadersTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersTimeoutError) + this.name = 'HeadersTimeoutError' + this.message = message || 'Headers Timeout Error' + this.code = 'UND_ERR_HEADERS_TIMEOUT' + } +} + +class HeadersOverflowError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, HeadersOverflowError) + this.name = 'HeadersOverflowError' + this.message = message || 'Headers Overflow Error' + this.code = 'UND_ERR_HEADERS_OVERFLOW' + } +} + +class BodyTimeoutError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, BodyTimeoutError) + this.name = 'BodyTimeoutError' + this.message = message || 'Body Timeout Error' + this.code = 'UND_ERR_BODY_TIMEOUT' + } +} + +class ResponseStatusCodeError extends UndiciError { + constructor (message, statusCode, headers, body) { + super(message) + Error.captureStackTrace(this, ResponseStatusCodeError) + this.name = 'ResponseStatusCodeError' + this.message = message || 'Response Status Code Error' + this.code = 'UND_ERR_RESPONSE_STATUS_CODE' + this.body = body + this.status = statusCode + this.statusCode = statusCode + this.headers = headers + } +} + +class InvalidArgumentError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidArgumentError) + this.name = 'InvalidArgumentError' + this.message = message || 'Invalid Argument Error' + this.code = 'UND_ERR_INVALID_ARG' + } +} + +class InvalidReturnValueError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InvalidReturnValueError) + this.name = 'InvalidReturnValueError' + this.message = message || 'Invalid Return Value Error' + this.code = 'UND_ERR_INVALID_RETURN_VALUE' + } +} + +class RequestAbortedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestAbortedError) + this.name = 'AbortError' + this.message = message || 'Request aborted' + this.code = 'UND_ERR_ABORTED' + } +} + +class InformationalError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, InformationalError) + this.name = 'InformationalError' + this.message = message || 'Request information' + this.code = 'UND_ERR_INFO' + } +} + +class RequestContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, RequestContentLengthMismatchError) + this.name = 'RequestContentLengthMismatchError' + this.message = message || 'Request body length does not 
match content-length header' + this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH' + } +} + +class ResponseContentLengthMismatchError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseContentLengthMismatchError) + this.name = 'ResponseContentLengthMismatchError' + this.message = message || 'Response body length does not match content-length header' + this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH' + } +} + +class ClientDestroyedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientDestroyedError) + this.name = 'ClientDestroyedError' + this.message = message || 'The client is destroyed' + this.code = 'UND_ERR_DESTROYED' + } +} + +class ClientClosedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ClientClosedError) + this.name = 'ClientClosedError' + this.message = message || 'The client is closed' + this.code = 'UND_ERR_CLOSED' + } +} + +class SocketError extends UndiciError { + constructor (message, socket) { + super(message) + Error.captureStackTrace(this, SocketError) + this.name = 'SocketError' + this.message = message || 'Socket error' + this.code = 'UND_ERR_SOCKET' + this.socket = socket + } +} + +class NotSupportedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'NotSupportedError' + this.message = message || 'Not supported error' + this.code = 'UND_ERR_NOT_SUPPORTED' + } +} + +class BalancedPoolMissingUpstreamError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, NotSupportedError) + this.name = 'MissingUpstreamError' + this.message = message || 'No upstream has been added to the BalancedPool' + this.code = 'UND_ERR_BPL_MISSING_UPSTREAM' + } +} + +class HTTPParserError extends Error { + constructor (message, code, data) { + super(message) + Error.captureStackTrace(this, HTTPParserError) + this.name = 'HTTPParserError' + this.code = code ? `HPE_${code}` : undefined + this.data = data ? 
data.toString() : undefined + } +} + +class ResponseExceededMaxSizeError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, ResponseExceededMaxSizeError) + this.name = 'ResponseExceededMaxSizeError' + this.message = message || 'Response content exceeded max size' + this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE' + } +} + +class RequestRetryError extends UndiciError { + constructor (message, code, { headers, data }) { + super(message) + Error.captureStackTrace(this, RequestRetryError) + this.name = 'RequestRetryError' + this.message = message || 'Request retry error' + this.code = 'UND_ERR_REQ_RETRY' + this.statusCode = code + this.data = data + this.headers = headers + } +} + +module.exports = { + HTTPParserError, + UndiciError, + HeadersTimeoutError, + HeadersOverflowError, + BodyTimeoutError, + RequestContentLengthMismatchError, + ConnectTimeoutError, + ResponseStatusCodeError, + InvalidArgumentError, + InvalidReturnValueError, + RequestAbortedError, + ClientDestroyedError, + ClientClosedError, + InformationalError, + SocketError, + NotSupportedError, + ResponseContentLengthMismatchError, + BalancedPoolMissingUpstreamError, + ResponseExceededMaxSizeError, + RequestRetryError +} + + +/***/ }), + +/***/ 4655: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + InvalidArgumentError, + NotSupportedError +} = __nccwpck_require__(8707) +const assert = __nccwpck_require__(2613) +const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(6443) +const util = __nccwpck_require__(3440) + +// tokenRegExp and headerCharRegex have been lifted from +// https://github.com/nodejs/node/blob/main/lib/_http_common.js + +/** + * Verifies that the given val is a valid HTTP token + * per the rules defined in RFC 7230 + * See https://tools.ietf.org/html/rfc7230#section-3.2.6 + */ +const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/ + +/** + * Matches if val contains an invalid field-vchar + * field-value = *( field-content / obs-fold ) + * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] + * field-vchar = VCHAR / obs-text + */ +const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/ + +// Verifies that a given path is valid does not contain control chars \x00 to \x20 +const invalidPathRegex = /[^\u0021-\u00ff]/ + +const kHandler = Symbol('handler') + +const channels = {} + +let extractBody + +try { + const diagnosticsChannel = __nccwpck_require__(1637) + channels.create = diagnosticsChannel.channel('undici:request:create') + channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent') + channels.headers = diagnosticsChannel.channel('undici:request:headers') + channels.trailers = diagnosticsChannel.channel('undici:request:trailers') + channels.error = diagnosticsChannel.channel('undici:request:error') +} catch { + channels.create = { hasSubscribers: false } + channels.bodySent = { hasSubscribers: false } + channels.headers = { hasSubscribers: false } + channels.trailers = { hasSubscribers: false } + channels.error = { hasSubscribers: false } +} + +class Request { + constructor (origin, { + path, + method, + body, + headers, + query, + idempotent, + blocking, + upgrade, + headersTimeout, + bodyTimeout, + reset, + throwOnError, + expectContinue + }, handler) { + if (typeof path !== 'string') { + throw new InvalidArgumentError('path must be a string') + } else if ( + path[0] !== '/' && + !(path.startsWith('http://') || path.startsWith('https://')) && + method !== 'CONNECT' + 
) { + throw new InvalidArgumentError('path must be an absolute URL or start with a slash') + } else if (invalidPathRegex.exec(path) !== null) { + throw new InvalidArgumentError('invalid request path') + } + + if (typeof method !== 'string') { + throw new InvalidArgumentError('method must be a string') + } else if (tokenRegExp.exec(method) === null) { + throw new InvalidArgumentError('invalid request method') + } + + if (upgrade && typeof upgrade !== 'string') { + throw new InvalidArgumentError('upgrade must be a string') + } + + if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) { + throw new InvalidArgumentError('invalid headersTimeout') + } + + if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) { + throw new InvalidArgumentError('invalid bodyTimeout') + } + + if (reset != null && typeof reset !== 'boolean') { + throw new InvalidArgumentError('invalid reset') + } + + if (expectContinue != null && typeof expectContinue !== 'boolean') { + throw new InvalidArgumentError('invalid expectContinue') + } + + this.headersTimeout = headersTimeout + + this.bodyTimeout = bodyTimeout + + this.throwOnError = throwOnError === true + + this.method = method + + this.abort = null + + if (body == null) { + this.body = null + } else if (util.isStream(body)) { + this.body = body + + const rState = this.body._readableState + if (!rState || !rState.autoDestroy) { + this.endHandler = function autoDestroy () { + util.destroy(this) + } + this.body.on('end', this.endHandler) + } + + this.errorHandler = err => { + if (this.abort) { + this.abort(err) + } else { + this.error = err + } + } + this.body.on('error', this.errorHandler) + } else if (util.isBuffer(body)) { + this.body = body.byteLength ? body : null + } else if (ArrayBuffer.isView(body)) { + this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null + } else if (body instanceof ArrayBuffer) { + this.body = body.byteLength ? Buffer.from(body) : null + } else if (typeof body === 'string') { + this.body = body.length ? Buffer.from(body) : null + } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) { + this.body = body + } else { + throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') + } + + this.completed = false + + this.aborted = false + + this.upgrade = upgrade || null + + this.path = query ? util.buildURL(path, query) : path + + this.origin = origin + + this.idempotent = idempotent == null + ? method === 'HEAD' || method === 'GET' + : idempotent + + this.blocking = blocking == null ? false : blocking + + this.reset = reset == null ? null : reset + + this.host = null + + this.contentLength = null + + this.contentType = null + + this.headers = '' + + // Only for H2 + this.expectContinue = expectContinue != null ? 
expectContinue : false + + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(this, headers[i], headers[i + 1]) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(this, key, headers[key]) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') + } + + if (util.isFormDataLike(this.body)) { + if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) { + throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.') + } + + if (!extractBody) { + extractBody = (__nccwpck_require__(8923).extractBody) + } + + const [bodyStream, contentType] = extractBody(body) + if (this.contentType == null) { + this.contentType = contentType + this.headers += `content-type: ${contentType}\r\n` + } + this.body = bodyStream.stream + this.contentLength = bodyStream.length + } else if (util.isBlobLike(body) && this.contentType == null && body.type) { + this.contentType = body.type + this.headers += `content-type: ${body.type}\r\n` + } + + util.validateHandler(handler, method, upgrade) + + this.servername = util.getServerName(this.host) + + this[kHandler] = handler + + if (channels.create.hasSubscribers) { + channels.create.publish({ request: this }) + } + } + + onBodySent (chunk) { + if (this[kHandler].onBodySent) { + try { + return this[kHandler].onBodySent(chunk) + } catch (err) { + this.abort(err) + } + } + } + + onRequestSent () { + if (channels.bodySent.hasSubscribers) { + channels.bodySent.publish({ request: this }) + } + + if (this[kHandler].onRequestSent) { + try { + return this[kHandler].onRequestSent() + } catch (err) { + this.abort(err) + } + } + } + + onConnect (abort) { + assert(!this.aborted) + assert(!this.completed) + + if (this.error) { + abort(this.error) + } else { + this.abort = abort + return this[kHandler].onConnect(abort) + } + } + + onHeaders (statusCode, headers, resume, statusText) { + assert(!this.aborted) + assert(!this.completed) + + if (channels.headers.hasSubscribers) { + channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) + } + + try { + return this[kHandler].onHeaders(statusCode, headers, resume, statusText) + } catch (err) { + this.abort(err) + } + } + + onData (chunk) { + assert(!this.aborted) + assert(!this.completed) + + try { + return this[kHandler].onData(chunk) + } catch (err) { + this.abort(err) + return false + } + } + + onUpgrade (statusCode, headers, socket) { + assert(!this.aborted) + assert(!this.completed) + + return this[kHandler].onUpgrade(statusCode, headers, socket) + } + + onComplete (trailers) { + this.onFinally() + + assert(!this.aborted) + + this.completed = true + if (channels.trailers.hasSubscribers) { + channels.trailers.publish({ request: this, trailers }) + } + + try { + return this[kHandler].onComplete(trailers) + } catch (err) { + // TODO (fix): This might be a bad idea? 
+ this.onError(err) + } + } + + onError (error) { + this.onFinally() + + if (channels.error.hasSubscribers) { + channels.error.publish({ request: this, error }) + } + + if (this.aborted) { + return + } + this.aborted = true + + return this[kHandler].onError(error) + } + + onFinally () { + if (this.errorHandler) { + this.body.off('error', this.errorHandler) + this.errorHandler = null + } + + if (this.endHandler) { + this.body.off('end', this.endHandler) + this.endHandler = null + } + } + + // TODO: adjust to support H2 + addHeader (key, value) { + processHeader(this, key, value) + return this + } + + static [kHTTP1BuildRequest] (origin, opts, handler) { + // TODO: Migrate header parsing here, to make Requests + // HTTP agnostic + return new Request(origin, opts, handler) + } + + static [kHTTP2BuildRequest] (origin, opts, handler) { + const headers = opts.headers + opts = { ...opts, headers: null } + + const request = new Request(origin, opts, handler) + + request.headers = {} + + if (Array.isArray(headers)) { + if (headers.length % 2 !== 0) { + throw new InvalidArgumentError('headers array must be even') + } + for (let i = 0; i < headers.length; i += 2) { + processHeader(request, headers[i], headers[i + 1], true) + } + } else if (headers && typeof headers === 'object') { + const keys = Object.keys(headers) + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + processHeader(request, key, headers[key], true) + } + } else if (headers != null) { + throw new InvalidArgumentError('headers must be an object or an array') + } + + return request + } + + static [kHTTP2CopyHeaders] (raw) { + const rawHeaders = raw.split('\r\n') + const headers = {} + + for (const header of rawHeaders) { + const [key, value] = header.split(': ') + + if (value == null || value.length === 0) continue + + if (headers[key]) headers[key] += `,${value}` + else headers[key] = value + } + + return headers + } +} + +function processHeaderValue (key, val, skipAppend) { + if (val && typeof val === 'object') { + throw new InvalidArgumentError(`invalid ${key} header`) + } + + val = val != null ? `${val}` : '' + + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) + } + + return skipAppend ? 
val : `${key}: ${val}\r\n` +} + +function processHeader (request, key, val, skipAppend = false) { + if (val && (typeof val === 'object' && !Array.isArray(val))) { + throw new InvalidArgumentError(`invalid ${key} header`) + } else if (val === undefined) { + return + } + + if ( + request.host === null && + key.length === 4 && + key.toLowerCase() === 'host' + ) { + if (headerCharRegex.exec(val) !== null) { + throw new InvalidArgumentError(`invalid ${key} header`) + } + // Consumed by Client + request.host = val + } else if ( + request.contentLength === null && + key.length === 14 && + key.toLowerCase() === 'content-length' + ) { + request.contentLength = parseInt(val, 10) + if (!Number.isFinite(request.contentLength)) { + throw new InvalidArgumentError('invalid content-length header') + } + } else if ( + request.contentType === null && + key.length === 12 && + key.toLowerCase() === 'content-type' + ) { + request.contentType = val + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) + } else if ( + key.length === 17 && + key.toLowerCase() === 'transfer-encoding' + ) { + throw new InvalidArgumentError('invalid transfer-encoding header') + } else if ( + key.length === 10 && + key.toLowerCase() === 'connection' + ) { + const value = typeof val === 'string' ? val.toLowerCase() : null + if (value !== 'close' && value !== 'keep-alive') { + throw new InvalidArgumentError('invalid connection header') + } else if (value === 'close') { + request.reset = true + } + } else if ( + key.length === 10 && + key.toLowerCase() === 'keep-alive' + ) { + throw new InvalidArgumentError('invalid keep-alive header') + } else if ( + key.length === 7 && + key.toLowerCase() === 'upgrade' + ) { + throw new InvalidArgumentError('invalid upgrade header') + } else if ( + key.length === 6 && + key.toLowerCase() === 'expect' + ) { + throw new NotSupportedError('expect header not supported') + } else if (tokenRegExp.exec(key) === null) { + throw new InvalidArgumentError('invalid header key') + } else { + if (Array.isArray(val)) { + for (let i = 0; i < val.length; i++) { + if (skipAppend) { + if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}` + else request.headers[key] = processHeaderValue(key, val[i], skipAppend) + } else { + request.headers += processHeaderValue(key, val[i]) + } + } + } else { + if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) + else request.headers += processHeaderValue(key, val) + } + } +} + +module.exports = Request + + +/***/ }), + +/***/ 6443: +/***/ ((module) => { + +module.exports = { + kClose: Symbol('close'), + kDestroy: Symbol('destroy'), + kDispatch: Symbol('dispatch'), + kUrl: Symbol('url'), + kWriting: Symbol('writing'), + kResuming: Symbol('resuming'), + kQueue: Symbol('queue'), + kConnect: Symbol('connect'), + kConnecting: Symbol('connecting'), + kHeadersList: Symbol('headers list'), + kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'), + kKeepAliveMaxTimeout: Symbol('max keep alive timeout'), + kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'), + kKeepAliveTimeoutValue: Symbol('keep alive timeout'), + kKeepAlive: Symbol('keep alive'), + kHeadersTimeout: Symbol('headers timeout'), + kBodyTimeout: Symbol('body timeout'), + kServerName: Symbol('server name'), + kLocalAddress: Symbol('local address'), + kHost: Symbol('host'), + kNoRef: Symbol('no ref'), + kBodyUsed: Symbol('used'), + kRunning: 
Symbol('running'), + kBlocking: Symbol('blocking'), + kPending: Symbol('pending'), + kSize: Symbol('size'), + kBusy: Symbol('busy'), + kQueued: Symbol('queued'), + kFree: Symbol('free'), + kConnected: Symbol('connected'), + kClosed: Symbol('closed'), + kNeedDrain: Symbol('need drain'), + kReset: Symbol('reset'), + kDestroyed: Symbol.for('nodejs.stream.destroyed'), + kMaxHeadersSize: Symbol('max headers size'), + kRunningIdx: Symbol('running index'), + kPendingIdx: Symbol('pending index'), + kError: Symbol('error'), + kClients: Symbol('clients'), + kClient: Symbol('client'), + kParser: Symbol('parser'), + kOnDestroyed: Symbol('destroy callbacks'), + kPipelining: Symbol('pipelining'), + kSocket: Symbol('socket'), + kHostHeader: Symbol('host header'), + kConnector: Symbol('connector'), + kStrictContentLength: Symbol('strict content length'), + kMaxRedirections: Symbol('maxRedirections'), + kMaxRequests: Symbol('maxRequestsPerClient'), + kProxy: Symbol('proxy agent options'), + kCounter: Symbol('socket request counter'), + kInterceptors: Symbol('dispatch interceptors'), + kMaxResponseSize: Symbol('max response size'), + kHTTP2Session: Symbol('http2Session'), + kHTTP2SessionState: Symbol('http2Session state'), + kHTTP2BuildRequest: Symbol('http2 build request'), + kHTTP1BuildRequest: Symbol('http1 build request'), + kHTTP2CopyHeaders: Symbol('http2 copy headers'), + kHTTPConnVersion: Symbol('http connection version'), + kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), + kConstruct: Symbol('constructable') +} + + +/***/ }), + +/***/ 3440: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const assert = __nccwpck_require__(2613) +const { kDestroyed, kBodyUsed } = __nccwpck_require__(6443) +const { IncomingMessage } = __nccwpck_require__(8611) +const stream = __nccwpck_require__(2203) +const net = __nccwpck_require__(9278) +const { InvalidArgumentError } = __nccwpck_require__(8707) +const { Blob } = __nccwpck_require__(181) +const nodeUtil = __nccwpck_require__(9023) +const { stringify } = __nccwpck_require__(3480) +const { headerNameLowerCasedRecord } = __nccwpck_require__(735) + +const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) + +function nop () {} + +function isStream (obj) { + return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function' +} + +// based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License) +function isBlobLike (object) { + return (Blob && object instanceof Blob) || ( + object && + typeof object === 'object' && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + /^(Blob|File)$/.test(object[Symbol.toStringTag]) + ) +} + +function buildURL (url, queryParams) { + if (url.includes('?') || url.includes('#')) { + throw new Error('Query params cannot be passed when url already contains "?" or "#".') + } + + const stringified = stringify(queryParams) + + if (stringified) { + url += '?' 
+ stringified + } + + return url +} + +function parseURL (url) { + if (typeof url === 'string') { + url = new URL(url) + + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') + } + + return url + } + + if (!url || typeof url !== 'object') { + throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.') + } + + if (!/^https?:/.test(url.origin || url.protocol)) { + throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') + } + + if (!(url instanceof URL)) { + if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) { + throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.') + } + + if (url.path != null && typeof url.path !== 'string') { + throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.') + } + + if (url.pathname != null && typeof url.pathname !== 'string') { + throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.') + } + + if (url.hostname != null && typeof url.hostname !== 'string') { + throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.') + } + + if (url.origin != null && typeof url.origin !== 'string') { + throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.') + } + + const port = url.port != null + ? url.port + : (url.protocol === 'https:' ? 443 : 80) + let origin = url.origin != null + ? url.origin + : `${url.protocol}//${url.hostname}:${port}` + let path = url.path != null + ? url.path + : `${url.pathname || ''}${url.search || ''}` + + if (origin.endsWith('/')) { + origin = origin.substring(0, origin.length - 1) + } + + if (path && !path.startsWith('/')) { + path = `/${path}` + } + // new URL(path, origin) is unsafe when `path` contains an absolute URL + // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL: + // If first parameter is a relative URL, second param is required, and will be used as the base URL. + // If first parameter is an absolute URL, a given second param will be ignored. 
+ url = new URL(origin + path) + } + + return url +} + +function parseOrigin (url) { + url = parseURL(url) + + if (url.pathname !== '/' || url.search || url.hash) { + throw new InvalidArgumentError('invalid url') + } + + return url +} + +function getHostname (host) { + if (host[0] === '[') { + const idx = host.indexOf(']') + + assert(idx !== -1) + return host.substring(1, idx) + } + + const idx = host.indexOf(':') + if (idx === -1) return host + + return host.substring(0, idx) +} + +// IP addresses are not valid server names per RFC6066 +// > Currently, the only server names supported are DNS hostnames +function getServerName (host) { + if (!host) { + return null + } + + assert.strictEqual(typeof host, 'string') + + const servername = getHostname(host) + if (net.isIP(servername)) { + return '' + } + + return servername +} + +function deepClone (obj) { + return JSON.parse(JSON.stringify(obj)) +} + +function isAsyncIterable (obj) { + return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function') +} + +function isIterable (obj) { + return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function')) +} + +function bodyLength (body) { + if (body == null) { + return 0 + } else if (isStream(body)) { + const state = body._readableState + return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length) + ? state.length + : null + } else if (isBlobLike(body)) { + return body.size != null ? body.size : null + } else if (isBuffer(body)) { + return body.byteLength + } + + return null +} + +function isDestroyed (stream) { + return !stream || !!(stream.destroyed || stream[kDestroyed]) +} + +function isReadableAborted (stream) { + const state = stream && stream._readableState + return isDestroyed(stream) && state && !state.endEmitted +} + +function destroy (stream, err) { + if (stream == null || !isStream(stream) || isDestroyed(stream)) { + return + } + + if (typeof stream.destroy === 'function') { + if (Object.getPrototypeOf(stream).constructor === IncomingMessage) { + // See: https://github.com/nodejs/node/pull/38505/files + stream.socket = null + } + + stream.destroy(err) + } else if (err) { + process.nextTick((stream, err) => { + stream.emit('error', err) + }, stream, err) + } + + if (stream.destroyed !== true) { + stream[kDestroyed] = true + } +} + +const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/ +function parseKeepAliveTimeout (val) { + const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR) + return m ? parseInt(m[1], 10) * 1000 : null +} + +/** + * Retrieves a header name and returns its lowercase value. 
+ * @param {string | Buffer} value Header name + * @returns {string} + */ +function headerNameToString (value) { + return headerNameLowerCasedRecord[value] || value.toLowerCase() +} + +function parseHeaders (headers, obj = {}) { + // For H2 support + if (!Array.isArray(headers)) return headers + + for (let i = 0; i < headers.length; i += 2) { + const key = headers[i].toString().toLowerCase() + let val = obj[key] + + if (!val) { + if (Array.isArray(headers[i + 1])) { + obj[key] = headers[i + 1].map(x => x.toString('utf8')) + } else { + obj[key] = headers[i + 1].toString('utf8') + } + } else { + if (!Array.isArray(val)) { + val = [val] + obj[key] = val + } + val.push(headers[i + 1].toString('utf8')) + } + } + + // See https://github.com/nodejs/node/pull/46528 + if ('content-length' in obj && 'content-disposition' in obj) { + obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1') + } + + return obj +} + +function parseRawHeaders (headers) { + const ret = [] + let hasContentLength = false + let contentDispositionIdx = -1 + + for (let n = 0; n < headers.length; n += 2) { + const key = headers[n + 0].toString() + const val = headers[n + 1].toString('utf8') + + if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) { + ret.push(key, val) + hasContentLength = true + } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) { + contentDispositionIdx = ret.push(key, val) - 1 + } else { + ret.push(key, val) + } + } + + // See https://github.com/nodejs/node/pull/46528 + if (hasContentLength && contentDispositionIdx !== -1) { + ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1') + } + + return ret +} + +function isBuffer (buffer) { + // See, https://github.com/mcollina/undici/pull/319 + return buffer instanceof Uint8Array || Buffer.isBuffer(buffer) +} + +function validateHandler (handler, method, upgrade) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') + } + + if (typeof handler.onConnect !== 'function') { + throw new InvalidArgumentError('invalid onConnect method') + } + + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } + + if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) { + throw new InvalidArgumentError('invalid onBodySent method') + } + + if (upgrade || method === 'CONNECT') { + if (typeof handler.onUpgrade !== 'function') { + throw new InvalidArgumentError('invalid onUpgrade method') + } + } else { + if (typeof handler.onHeaders !== 'function') { + throw new InvalidArgumentError('invalid onHeaders method') + } + + if (typeof handler.onData !== 'function') { + throw new InvalidArgumentError('invalid onData method') + } + + if (typeof handler.onComplete !== 'function') { + throw new InvalidArgumentError('invalid onComplete method') + } + } +} + +// A body is disturbed if it has been read from and it cannot +// be re-used without losing state or data. +function isDisturbed (body) { + return !!(body && ( + stream.isDisturbed + ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed? + : body[kBodyUsed] || + body.readableDidRead || + (body._readableState && body._readableState.dataEmitted) || + isReadableAborted(body) + )) +} + +function isErrored (body) { + return !!(body && ( + stream.isErrored + ? 
stream.isErrored(body) + : /state: 'errored'/.test(nodeUtil.inspect(body) + ))) +} + +function isReadable (body) { + return !!(body && ( + stream.isReadable + ? stream.isReadable(body) + : /state: 'readable'/.test(nodeUtil.inspect(body) + ))) +} + +function getSocketInfo (socket) { + return { + localAddress: socket.localAddress, + localPort: socket.localPort, + remoteAddress: socket.remoteAddress, + remotePort: socket.remotePort, + remoteFamily: socket.remoteFamily, + timeout: socket.timeout, + bytesWritten: socket.bytesWritten, + bytesRead: socket.bytesRead + } +} + +async function * convertIterableToBuffer (iterable) { + for await (const chunk of iterable) { + yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) + } +} + +let ReadableStream +function ReadableStreamFrom (iterable) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(3774).ReadableStream) + } + + if (ReadableStream.from) { + return ReadableStream.from(convertIterableToBuffer(iterable)) + } + + let iterator + return new ReadableStream( + { + async start () { + iterator = iterable[Symbol.asyncIterator]() + }, + async pull (controller) { + const { done, value } = await iterator.next() + if (done) { + queueMicrotask(() => { + controller.close() + }) + } else { + const buf = Buffer.isBuffer(value) ? value : Buffer.from(value) + controller.enqueue(new Uint8Array(buf)) + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + } + }, + 0 + ) +} + +// The chunk should be a FormData instance and contains +// all the required methods. +function isFormDataLike (object) { + return ( + object && + typeof object === 'object' && + typeof object.append === 'function' && + typeof object.delete === 'function' && + typeof object.get === 'function' && + typeof object.getAll === 'function' && + typeof object.has === 'function' && + typeof object.set === 'function' && + object[Symbol.toStringTag] === 'FormData' + ) +} + +function throwIfAborted (signal) { + if (!signal) { return } + if (typeof signal.throwIfAborted === 'function') { + signal.throwIfAborted() + } else { + if (signal.aborted) { + // DOMException not available < v17.0.0 + const err = new Error('The operation was aborted') + err.name = 'AbortError' + throw err + } + } +} + +function addAbortListener (signal, listener) { + if ('addEventListener' in signal) { + signal.addEventListener('abort', listener, { once: true }) + return () => signal.removeEventListener('abort', listener) + } + signal.addListener('abort', listener) + return () => signal.removeListener('abort', listener) +} + +const hasToWellFormed = !!String.prototype.toWellFormed + +/** + * @param {string} val + */ +function toUSVString (val) { + if (hasToWellFormed) { + return `${val}`.toWellFormed() + } else if (nodeUtil.toUSVString) { + return nodeUtil.toUSVString(val) + } + + return `${val}` +} + +// Parsed accordingly to RFC 9110 +// https://www.rfc-editor.org/rfc/rfc9110#field.content-range +function parseRangeHeader (range) { + if (range == null || range === '') return { start: 0, end: null, size: null } + + const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null + return m + ? { + start: parseInt(m[1]), + end: m[2] ? parseInt(m[2]) : null, + size: m[3] ? 
parseInt(m[3]) : null + } + : null +} + +const kEnumerableProperty = Object.create(null) +kEnumerableProperty.enumerable = true + +module.exports = { + kEnumerableProperty, + nop, + isDisturbed, + isErrored, + isReadable, + toUSVString, + isReadableAborted, + isBlobLike, + parseOrigin, + parseURL, + getServerName, + isStream, + isIterable, + isAsyncIterable, + isDestroyed, + headerNameToString, + parseRawHeaders, + parseHeaders, + parseKeepAliveTimeout, + destroy, + bodyLength, + deepClone, + ReadableStreamFrom, + isBuffer, + validateHandler, + getSocketInfo, + isFormDataLike, + buildURL, + throwIfAborted, + addAbortListener, + parseRangeHeader, + nodeMajor, + nodeMinor, + nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), + safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] +} + + +/***/ }), + +/***/ 1: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Dispatcher = __nccwpck_require__(992) +const { + ClientDestroyedError, + ClientClosedError, + InvalidArgumentError +} = __nccwpck_require__(8707) +const { kDestroy, kClose, kDispatch, kInterceptors } = __nccwpck_require__(6443) + +const kDestroyed = Symbol('destroyed') +const kClosed = Symbol('closed') +const kOnDestroyed = Symbol('onDestroyed') +const kOnClosed = Symbol('onClosed') +const kInterceptedDispatch = Symbol('Intercepted Dispatch') + +class DispatcherBase extends Dispatcher { + constructor () { + super() + + this[kDestroyed] = false + this[kOnDestroyed] = null + this[kClosed] = false + this[kOnClosed] = [] + } + + get destroyed () { + return this[kDestroyed] + } + + get closed () { + return this[kClosed] + } + + get interceptors () { + return this[kInterceptors] + } + + set interceptors (newInterceptors) { + if (newInterceptors) { + for (let i = newInterceptors.length - 1; i >= 0; i--) { + const interceptor = this[kInterceptors][i] + if (typeof interceptor !== 'function') { + throw new InvalidArgumentError('interceptor must be an function') + } + } + } + + this[kInterceptors] = newInterceptors + } + + close (callback) { + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.close((err, data) => { + return err ? reject(err) : resolve(data) + }) + }) + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (this[kDestroyed]) { + queueMicrotask(() => callback(new ClientDestroyedError(), null)) + return + } + + if (this[kClosed]) { + if (this[kOnClosed]) { + this[kOnClosed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } + + this[kClosed] = true + this[kOnClosed].push(callback) + + const onClosed = () => { + const callbacks = this[kOnClosed] + this[kOnClosed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } + + // Should not error. + this[kClose]() + .then(() => this.destroy()) + .then(() => { + queueMicrotask(onClosed) + }) + } + + destroy (err, callback) { + if (typeof err === 'function') { + callback = err + err = null + } + + if (callback === undefined) { + return new Promise((resolve, reject) => { + this.destroy(err, (err, data) => { + return err ? 
/* istanbul ignore next: should never error */ reject(err) : resolve(data) + }) + }) + } + + if (typeof callback !== 'function') { + throw new InvalidArgumentError('invalid callback') + } + + if (this[kDestroyed]) { + if (this[kOnDestroyed]) { + this[kOnDestroyed].push(callback) + } else { + queueMicrotask(() => callback(null, null)) + } + return + } + + if (!err) { + err = new ClientDestroyedError() + } + + this[kDestroyed] = true + this[kOnDestroyed] = this[kOnDestroyed] || [] + this[kOnDestroyed].push(callback) + + const onDestroyed = () => { + const callbacks = this[kOnDestroyed] + this[kOnDestroyed] = null + for (let i = 0; i < callbacks.length; i++) { + callbacks[i](null, null) + } + } + + // Should not error. + this[kDestroy](err).then(() => { + queueMicrotask(onDestroyed) + }) + } + + [kInterceptedDispatch] (opts, handler) { + if (!this[kInterceptors] || this[kInterceptors].length === 0) { + this[kInterceptedDispatch] = this[kDispatch] + return this[kDispatch](opts, handler) + } + + let dispatch = this[kDispatch].bind(this) + for (let i = this[kInterceptors].length - 1; i >= 0; i--) { + dispatch = this[kInterceptors][i](dispatch) + } + this[kInterceptedDispatch] = dispatch + return dispatch(opts, handler) + } + + dispatch (opts, handler) { + if (!handler || typeof handler !== 'object') { + throw new InvalidArgumentError('handler must be an object') + } + + try { + if (!opts || typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object.') + } + + if (this[kDestroyed] || this[kOnDestroyed]) { + throw new ClientDestroyedError() + } + + if (this[kClosed]) { + throw new ClientClosedError() + } + + return this[kInterceptedDispatch](opts, handler) + } catch (err) { + if (typeof handler.onError !== 'function') { + throw new InvalidArgumentError('invalid onError method') + } + + handler.onError(err) + + return false + } + } +} + +module.exports = DispatcherBase + + +/***/ }), + +/***/ 992: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const EventEmitter = __nccwpck_require__(4434) + +class Dispatcher extends EventEmitter { + dispatch () { + throw new Error('not implemented') + } + + close () { + throw new Error('not implemented') + } + + destroy () { + throw new Error('not implemented') + } +} + +module.exports = Dispatcher + + +/***/ }), + +/***/ 8923: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Busboy = __nccwpck_require__(9581) +const util = __nccwpck_require__(3440) +const { + ReadableStreamFrom, + isBlobLike, + isReadableStreamLike, + readableStreamClose, + createDeferredPromise, + fullyReadBody +} = __nccwpck_require__(5523) +const { FormData } = __nccwpck_require__(3073) +const { kState } = __nccwpck_require__(9710) +const { webidl } = __nccwpck_require__(4222) +const { DOMException, structuredClone } = __nccwpck_require__(7326) +const { Blob, File: NativeFile } = __nccwpck_require__(181) +const { kBodyUsed } = __nccwpck_require__(6443) +const assert = __nccwpck_require__(2613) +const { isErrored } = __nccwpck_require__(3440) +const { isUint8Array, isArrayBuffer } = __nccwpck_require__(8253) +const { File: UndiciFile } = __nccwpck_require__(3041) +const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(4322) + +let ReadableStream = globalThis.ReadableStream + +/** @type {globalThis['File']} */ +const File = NativeFile ?? 
UndiciFile +const textEncoder = new TextEncoder() +const textDecoder = new TextDecoder() + +// https://fetch.spec.whatwg.org/#concept-bodyinit-extract +function extractBody (object, keepalive = false) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(3774).ReadableStream) + } + + // 1. Let stream be null. + let stream = null + + // 2. If object is a ReadableStream object, then set stream to object. + if (object instanceof ReadableStream) { + stream = object + } else if (isBlobLike(object)) { + // 3. Otherwise, if object is a Blob object, set stream to the + // result of running object’s get stream. + stream = object.stream() + } else { + // 4. Otherwise, set stream to a new ReadableStream object, and set + // up stream. + stream = new ReadableStream({ + async pull (controller) { + controller.enqueue( + typeof source === 'string' ? textEncoder.encode(source) : source + ) + queueMicrotask(() => readableStreamClose(controller)) + }, + start () {}, + type: undefined + }) + } + + // 5. Assert: stream is a ReadableStream object. + assert(isReadableStreamLike(stream)) + + // 6. Let action be null. + let action = null + + // 7. Let source be null. + let source = null + + // 8. Let length be null. + let length = null + + // 9. Let type be null. + let type = null + + // 10. Switch on object: + if (typeof object === 'string') { + // Set source to the UTF-8 encoding of object. + // Note: setting source to a Uint8Array here breaks some mocking assumptions. + source = object + + // Set type to `text/plain;charset=UTF-8`. + type = 'text/plain;charset=UTF-8' + } else if (object instanceof URLSearchParams) { + // URLSearchParams + + // spec says to run application/x-www-form-urlencoded on body.list + // this is implemented in Node.js as apart of an URLSearchParams instance toString method + // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490 + // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100 + + // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list. + source = object.toString() + + // Set type to `application/x-www-form-urlencoded;charset=UTF-8`. + type = 'application/x-www-form-urlencoded;charset=UTF-8' + } else if (isArrayBuffer(object)) { + // BufferSource/ArrayBuffer + + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.slice()) + } else if (ArrayBuffer.isView(object)) { + // BufferSource/ArrayBufferView + + // Set source to a copy of the bytes held by object. + source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) + } else if (util.isFormDataLike(object)) { + const boundary = `----formdata-undici-0${`${Math.floor(Math.random() * 1e11)}`.padStart(11, '0')}` + const prefix = `--${boundary}\r\nContent-Disposition: form-data` + + /*! formdata-polyfill. MIT License. Jimmy Wärting */ + const escape = (str) => + str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22') + const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n') + + // Set action to this step: run the multipart/form-data + // encoding algorithm, with object’s entry list and UTF-8. + // - This ensures that the body is immutable and can't be changed afterwords + // - That the content-length is calculated in advance. + // - And that all parts are pre-encoded and ready to be sent. 
+ + const blobParts = [] + const rn = new Uint8Array([13, 10]) // '\r\n' + length = 0 + let hasUnknownSizeValue = false + + for (const [name, value] of object) { + if (typeof value === 'string') { + const chunk = textEncoder.encode(prefix + + `; name="${escape(normalizeLinefeeds(name))}"` + + `\r\n\r\n${normalizeLinefeeds(value)}\r\n`) + blobParts.push(chunk) + length += chunk.byteLength + } else { + const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + + (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' + + `Content-Type: ${ + value.type || 'application/octet-stream' + }\r\n\r\n`) + blobParts.push(chunk, value, rn) + if (typeof value.size === 'number') { + length += chunk.byteLength + value.size + rn.byteLength + } else { + hasUnknownSizeValue = true + } + } + } + + const chunk = textEncoder.encode(`--${boundary}--`) + blobParts.push(chunk) + length += chunk.byteLength + if (hasUnknownSizeValue) { + length = null + } + + // Set source to object. + source = object + + action = async function * () { + for (const part of blobParts) { + if (part.stream) { + yield * part.stream() + } else { + yield part + } + } + } + + // Set type to `multipart/form-data; boundary=`, + // followed by the multipart/form-data boundary string generated + // by the multipart/form-data encoding algorithm. + type = 'multipart/form-data; boundary=' + boundary + } else if (isBlobLike(object)) { + // Blob + + // Set source to object. + source = object + + // Set length to object’s size. + length = object.size + + // If object’s type attribute is not the empty byte sequence, set + // type to its value. + if (object.type) { + type = object.type + } + } else if (typeof object[Symbol.asyncIterator] === 'function') { + // If keepalive is true, then throw a TypeError. + if (keepalive) { + throw new TypeError('keepalive') + } + + // If object is disturbed or locked, then throw a TypeError. + if (util.isDisturbed(object) || object.locked) { + throw new TypeError( + 'Response body object should not be disturbed or locked' + ) + } + + stream = + object instanceof ReadableStream ? object : ReadableStreamFrom(object) + } + + // 11. If source is a byte sequence, then set action to a + // step that returns source and length to source’s length. + if (typeof source === 'string' || util.isBuffer(source)) { + length = Buffer.byteLength(source) + } + + // 12. If action is non-null, then run these steps in in parallel: + if (action != null) { + // Run action. + let iterator + stream = new ReadableStream({ + async start () { + iterator = action(object)[Symbol.asyncIterator]() + }, + async pull (controller) { + const { value, done } = await iterator.next() + if (done) { + // When running action is done, close stream. + queueMicrotask(() => { + controller.close() + }) + } else { + // Whenever one or more bytes are available and stream is not errored, + // enqueue a Uint8Array wrapping an ArrayBuffer containing the available + // bytes into stream. + if (!isErrored(stream)) { + controller.enqueue(new Uint8Array(value)) + } + } + return controller.desiredSize > 0 + }, + async cancel (reason) { + await iterator.return() + }, + type: undefined + }) + } + + // 13. Let body be a body whose stream is stream, source is source, + // and length is length. + const body = { stream, source, length } + + // 14. Return (body, type). 
+ return [body, type] +} + +// https://fetch.spec.whatwg.org/#bodyinit-safely-extract +function safelyExtractBody (object, keepalive = false) { + if (!ReadableStream) { + // istanbul ignore next + ReadableStream = (__nccwpck_require__(3774).ReadableStream) + } + + // To safely extract a body and a `Content-Type` value from + // a byte sequence or BodyInit object object, run these steps: + + // 1. If object is a ReadableStream object, then: + if (object instanceof ReadableStream) { + // Assert: object is neither disturbed nor locked. + // istanbul ignore next + assert(!util.isDisturbed(object), 'The body has already been consumed.') + // istanbul ignore next + assert(!object.locked, 'The stream is locked.') + } + + // 2. Return the results of extracting object. + return extractBody(object, keepalive) +} + +function cloneBody (body) { + // To clone a body body, run these steps: + + // https://fetch.spec.whatwg.org/#concept-body-clone + + // 1. Let « out1, out2 » be the result of teeing body’s stream. + const [out1, out2] = body.stream.tee() + const out2Clone = structuredClone(out2, { transfer: [out2] }) + // This, for whatever reasons, unrefs out2Clone which allows + // the process to exit by itself. + const [, finalClone] = out2Clone.tee() + + // 2. Set body’s stream to out1. + body.stream = out1 + + // 3. Return a body whose stream is out2 and other members are copied from body. + return { + stream: finalClone, + length: body.length, + source: body.source + } +} + +async function * consumeBody (body) { + if (body) { + if (isUint8Array(body)) { + yield body + } else { + const stream = body.stream + + if (util.isDisturbed(stream)) { + throw new TypeError('The body has already been consumed.') + } + + if (stream.locked) { + throw new TypeError('The stream is locked.') + } + + // Compat. + stream[kBodyUsed] = true + + yield * stream + } + } +} + +function throwIfAborted (state) { + if (state.aborted) { + throw new DOMException('The operation was aborted.', 'AbortError') + } +} + +function bodyMixinMethods (instance) { + const methods = { + blob () { + // The blob() method steps are to return the result of + // running consume body with this and the following step + // given a byte sequence bytes: return a Blob whose + // contents are bytes and whose type attribute is this’s + // MIME type. + return specConsumeBody(this, (bytes) => { + let mimeType = bodyMimeType(this) + + if (mimeType === 'failure') { + mimeType = '' + } else if (mimeType) { + mimeType = serializeAMimeType(mimeType) + } + + // Return a Blob whose contents are bytes and type attribute + // is mimeType. + return new Blob([bytes], { type: mimeType }) + }, instance) + }, + + arrayBuffer () { + // The arrayBuffer() method steps are to return the result + // of running consume body with this and the following step + // given a byte sequence bytes: return a new ArrayBuffer + // whose contents are bytes. + return specConsumeBody(this, (bytes) => { + return new Uint8Array(bytes).buffer + }, instance) + }, + + text () { + // The text() method steps are to return the result of running + // consume body with this and UTF-8 decode. + return specConsumeBody(this, utf8DecodeBytes, instance) + }, + + json () { + // The json() method steps are to return the result of running + // consume body with this and parse JSON from bytes. 
+ return specConsumeBody(this, parseJSONFromBytes, instance) + }, + + async formData () { + webidl.brandCheck(this, instance) + + throwIfAborted(this[kState]) + + const contentType = this.headers.get('Content-Type') + + // If mimeType’s essence is "multipart/form-data", then: + if (/multipart\/form-data/.test(contentType)) { + const headers = {} + for (const [key, value] of this.headers) headers[key.toLowerCase()] = value + + const responseFormData = new FormData() + + let busboy + + try { + busboy = new Busboy({ + headers, + preservePath: true + }) + } catch (err) { + throw new DOMException(`${err}`, 'AbortError') + } + + busboy.on('field', (name, value) => { + responseFormData.append(name, value) + }) + busboy.on('file', (name, value, filename, encoding, mimeType) => { + const chunks = [] + + if (encoding === 'base64' || encoding.toLowerCase() === 'base64') { + let base64chunk = '' + + value.on('data', (chunk) => { + base64chunk += chunk.toString().replace(/[\r\n]/gm, '') + + const end = base64chunk.length - base64chunk.length % 4 + chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64')) + + base64chunk = base64chunk.slice(end) + }) + value.on('end', () => { + chunks.push(Buffer.from(base64chunk, 'base64')) + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } else { + value.on('data', (chunk) => { + chunks.push(chunk) + }) + value.on('end', () => { + responseFormData.append(name, new File(chunks, filename, { type: mimeType })) + }) + } + }) + + const busboyResolve = new Promise((resolve, reject) => { + busboy.on('finish', resolve) + busboy.on('error', (err) => reject(new TypeError(err))) + }) + + if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk) + busboy.end() + await busboyResolve + + return responseFormData + } else if (/application\/x-www-form-urlencoded/.test(contentType)) { + // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then: + + // 1. Let entries be the result of parsing bytes. + let entries + try { + let text = '' + // application/x-www-form-urlencoded parser will keep the BOM. + // https://url.spec.whatwg.org/#concept-urlencoded-parser + // Note that streaming decoder is stateful and cannot be reused + const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) + + for await (const chunk of consumeBody(this[kState].body)) { + if (!isUint8Array(chunk)) { + throw new TypeError('Expected Uint8Array chunk') + } + text += streamingDecoder.decode(chunk, { stream: true }) + } + text += streamingDecoder.decode() + entries = new URLSearchParams(text) + } catch (err) { + // istanbul ignore next: Unclear when new URLSearchParams can fail on a string. + // 2. If entries is failure, then throw a TypeError. + throw Object.assign(new TypeError(), { cause: err }) + } + + // 3. Return a new FormData object whose entries are entries. + const formData = new FormData() + for (const [name, value] of entries) { + formData.append(name, value) + } + return formData + } else { + // Wait a tick before checking if the request has been aborted. + // Otherwise, a TypeError can be thrown when an AbortError should. + await Promise.resolve() + + throwIfAborted(this[kState]) + + // Otherwise, throw a TypeError. + throw webidl.errors.exception({ + header: `${instance.name}.formData`, + message: 'Could not parse content as FormData.' 
+ }) + } + } + } + + return methods +} + +function mixinBody (prototype) { + Object.assign(prototype.prototype, bodyMixinMethods(prototype)) +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-body-consume-body + * @param {Response|Request} object + * @param {(value: unknown) => unknown} convertBytesToJSValue + * @param {Response|Request} instance + */ +async function specConsumeBody (object, convertBytesToJSValue, instance) { + webidl.brandCheck(object, instance) + + throwIfAborted(object[kState]) + + // 1. If object is unusable, then return a promise rejected + // with a TypeError. + if (bodyUnusable(object[kState].body)) { + throw new TypeError('Body is unusable') + } + + // 2. Let promise be a new promise. + const promise = createDeferredPromise() + + // 3. Let errorSteps given error be to reject promise with error. + const errorSteps = (error) => promise.reject(error) + + // 4. Let successSteps given a byte sequence data be to resolve + // promise with the result of running convertBytesToJSValue + // with data. If that threw an exception, then run errorSteps + // with that exception. + const successSteps = (data) => { + try { + promise.resolve(convertBytesToJSValue(data)) + } catch (e) { + errorSteps(e) + } + } + + // 5. If object’s body is null, then run successSteps with an + // empty byte sequence. + if (object[kState].body == null) { + successSteps(new Uint8Array()) + return promise.promise + } + + // 6. Otherwise, fully read object’s body given successSteps, + // errorSteps, and object’s relevant global object. + await fullyReadBody(object[kState].body, successSteps, errorSteps) + + // 7. Return promise. + return promise.promise +} + +// https://fetch.spec.whatwg.org/#body-unusable +function bodyUnusable (body) { + // An object including the Body interface mixin is + // said to be unusable if its body is non-null and + // its body’s stream is disturbed or locked. + return body != null && (body.stream.locked || util.isDisturbed(body.stream)) +} + +/** + * @see https://encoding.spec.whatwg.org/#utf-8-decode + * @param {Buffer} buffer + */ +function utf8DecodeBytes (buffer) { + if (buffer.length === 0) { + return '' + } + + // 1. Let buffer be the result of peeking three bytes from + // ioQueue, converted to a byte sequence. + + // 2. If buffer is 0xEF 0xBB 0xBF, then read three + // bytes from ioQueue. (Do nothing with those bytes.) + if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) { + buffer = buffer.subarray(3) + } + + // 3. Process a queue with an instance of UTF-8’s + // decoder, ioQueue, output, and "replacement". + const output = textDecoder.decode(buffer) + + // 4. Return output. 
+ return output +} + +/** + * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value + * @param {Uint8Array} bytes + */ +function parseJSONFromBytes (bytes) { + return JSON.parse(utf8DecodeBytes(bytes)) +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-body-mime-type + * @param {import('./response').Response|import('./request').Request} object + */ +function bodyMimeType (object) { + const { headersList } = object[kState] + const contentType = headersList.get('content-type') + + if (contentType === null) { + return 'failure' + } + + return parseMIMEType(contentType) +} + +module.exports = { + extractBody, + safelyExtractBody, + cloneBody, + mixinBody +} + + +/***/ }), + +/***/ 7326: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { MessageChannel, receiveMessageOnPort } = __nccwpck_require__(8167) + +const corsSafeListedMethods = ['GET', 'HEAD', 'POST'] +const corsSafeListedMethodsSet = new Set(corsSafeListedMethods) + +const nullBodyStatus = [101, 204, 205, 304] + +const redirectStatus = [301, 302, 303, 307, 308] +const redirectStatusSet = new Set(redirectStatus) + +// https://fetch.spec.whatwg.org/#block-bad-port +const badPorts = [ + '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79', + '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137', + '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532', + '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723', + '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697', + '10080' +] + +const badPortsSet = new Set(badPorts) + +// https://w3c.github.io/webappsec-referrer-policy/#referrer-policies +const referrerPolicy = [ + '', + 'no-referrer', + 'no-referrer-when-downgrade', + 'same-origin', + 'origin', + 'strict-origin', + 'origin-when-cross-origin', + 'strict-origin-when-cross-origin', + 'unsafe-url' +] +const referrerPolicySet = new Set(referrerPolicy) + +const requestRedirect = ['follow', 'manual', 'error'] + +const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE'] +const safeMethodsSet = new Set(safeMethods) + +const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors'] + +const requestCredentials = ['omit', 'same-origin', 'include'] + +const requestCache = [ + 'default', + 'no-store', + 'reload', + 'no-cache', + 'force-cache', + 'only-if-cached' +] + +// https://fetch.spec.whatwg.org/#request-body-header-name +const requestBodyHeader = [ + 'content-encoding', + 'content-language', + 'content-location', + 'content-type', + // See https://github.com/nodejs/undici/issues/2021 + // 'Content-Length' is a forbidden header name, which is typically + // removed in the Headers implementation. However, undici doesn't + // filter out headers, so we add it here. 
+ 'content-length' +] + +// https://fetch.spec.whatwg.org/#enumdef-requestduplex +const requestDuplex = [ + 'half' +] + +// http://fetch.spec.whatwg.org/#forbidden-method +const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK'] +const forbiddenMethodsSet = new Set(forbiddenMethods) + +const subresource = [ + 'audio', + 'audioworklet', + 'font', + 'image', + 'manifest', + 'paintworklet', + 'script', + 'style', + 'track', + 'video', + 'xslt', + '' +] +const subresourceSet = new Set(subresource) + +/** @type {globalThis['DOMException']} */ +const DOMException = globalThis.DOMException ?? (() => { + // DOMException was only made a global in Node v17.0.0, + // but fetch supports >= v16.8. + try { + atob('~') + } catch (err) { + return Object.getPrototypeOf(err).constructor + } +})() + +let channel + +/** @type {globalThis['structuredClone']} */ +const structuredClone = + globalThis.structuredClone ?? + // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js + // structuredClone was added in v17.0.0, but fetch supports v16.8 + function structuredClone (value, options = undefined) { + if (arguments.length === 0) { + throw new TypeError('missing argument') + } + + if (!channel) { + channel = new MessageChannel() + } + channel.port1.unref() + channel.port2.unref() + channel.port1.postMessage(value, options?.transfer) + return receiveMessageOnPort(channel.port2).message + } + +module.exports = { + DOMException, + structuredClone, + subresource, + forbiddenMethods, + requestBodyHeader, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + redirectStatus, + corsSafeListedMethods, + nullBodyStatus, + safeMethods, + badPorts, + requestDuplex, + subresourceSet, + badPortsSet, + redirectStatusSet, + corsSafeListedMethodsSet, + safeMethodsSet, + forbiddenMethodsSet, + referrerPolicySet +} + + +/***/ }), + +/***/ 4322: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const assert = __nccwpck_require__(2613) +const { atob } = __nccwpck_require__(181) +const { isomorphicDecode } = __nccwpck_require__(5523) + +const encoder = new TextEncoder() + +/** + * @see https://mimesniff.spec.whatwg.org/#http-token-code-point + */ +const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/ +const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line +/** + * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point + */ +const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line + +// https://fetch.spec.whatwg.org/#data-url-processor +/** @param {URL} dataURL */ +function dataURLProcessor (dataURL) { + // 1. Assert: dataURL’s scheme is "data". + assert(dataURL.protocol === 'data:') + + // 2. Let input be the result of running the URL + // serializer on dataURL with exclude fragment + // set to true. + let input = URLSerializer(dataURL, true) + + // 3. Remove the leading "data:" string from input. + input = input.slice(5) + + // 4. Let position point at the start of input. + const position = { position: 0 } + + // 5. Let mimeType be the result of collecting a + // sequence of code points that are not equal + // to U+002C (,), given position. + let mimeType = collectASequenceOfCodePointsFast( + ',', + input, + position + ) + + // 6. Strip leading and trailing ASCII whitespace + // from mimeType. 
+ // Undici implementation note: we need to store the + // length because if the mimetype has spaces removed, + // the wrong amount will be sliced from the input in + // step #9 + const mimeTypeLength = mimeType.length + mimeType = removeASCIIWhitespace(mimeType, true, true) + + // 7. If position is past the end of input, then + // return failure + if (position.position >= input.length) { + return 'failure' + } + + // 8. Advance position by 1. + position.position++ + + // 9. Let encodedBody be the remainder of input. + const encodedBody = input.slice(mimeTypeLength + 1) + + // 10. Let body be the percent-decoding of encodedBody. + let body = stringPercentDecode(encodedBody) + + // 11. If mimeType ends with U+003B (;), followed by + // zero or more U+0020 SPACE, followed by an ASCII + // case-insensitive match for "base64", then: + if (/;(\u0020){0,}base64$/i.test(mimeType)) { + // 1. Let stringBody be the isomorphic decode of body. + const stringBody = isomorphicDecode(body) + + // 2. Set body to the forgiving-base64 decode of + // stringBody. + body = forgivingBase64(stringBody) + + // 3. If body is failure, then return failure. + if (body === 'failure') { + return 'failure' + } + + // 4. Remove the last 6 code points from mimeType. + mimeType = mimeType.slice(0, -6) + + // 5. Remove trailing U+0020 SPACE code points from mimeType, + // if any. + mimeType = mimeType.replace(/(\u0020)+$/, '') + + // 6. Remove the last U+003B (;) code point from mimeType. + mimeType = mimeType.slice(0, -1) + } + + // 12. If mimeType starts with U+003B (;), then prepend + // "text/plain" to mimeType. + if (mimeType.startsWith(';')) { + mimeType = 'text/plain' + mimeType + } + + // 13. Let mimeTypeRecord be the result of parsing + // mimeType. + let mimeTypeRecord = parseMIMEType(mimeType) + + // 14. If mimeTypeRecord is failure, then set + // mimeTypeRecord to text/plain;charset=US-ASCII. + if (mimeTypeRecord === 'failure') { + mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII') + } + + // 15. Return a new data: URL struct whose MIME + // type is mimeTypeRecord and body is body. + // https://fetch.spec.whatwg.org/#data-url-struct + return { mimeType: mimeTypeRecord, body } +} + +// https://url.spec.whatwg.org/#concept-url-serializer +/** + * @param {URL} url + * @param {boolean} excludeFragment + */ +function URLSerializer (url, excludeFragment = false) { + if (!excludeFragment) { + return url.href + } + + const href = url.href + const hashLength = url.hash.length + + return hashLength === 0 ? href : href.substring(0, href.length - hashLength) +} + +// https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points +/** + * @param {(char: string) => boolean} condition + * @param {string} input + * @param {{ position: number }} position + */ +function collectASequenceOfCodePoints (condition, input, position) { + // 1. Let result be the empty string. + let result = '' + + // 2. While position doesn’t point past the end of input and the + // code point at position within input meets the condition condition: + while (position.position < input.length && condition(input[position.position])) { + // 1. Append that code point to the end of result. + result += input[position.position] + + // 2. Advance position by 1. + position.position++ + } + + // 3. Return result. + return result +} + +/** + * A faster collectASequenceOfCodePoints that only works when comparing a single character. 
+ * @param {string} char + * @param {string} input + * @param {{ position: number }} position + */ +function collectASequenceOfCodePointsFast (char, input, position) { + const idx = input.indexOf(char, position.position) + const start = position.position + + if (idx === -1) { + position.position = input.length + return input.slice(start) + } + + position.position = idx + return input.slice(start, position.position) +} + +// https://url.spec.whatwg.org/#string-percent-decode +/** @param {string} input */ +function stringPercentDecode (input) { + // 1. Let bytes be the UTF-8 encoding of input. + const bytes = encoder.encode(input) + + // 2. Return the percent-decoding of bytes. + return percentDecode(bytes) +} + +// https://url.spec.whatwg.org/#percent-decode +/** @param {Uint8Array} input */ +function percentDecode (input) { + // 1. Let output be an empty byte sequence. + /** @type {number[]} */ + const output = [] + + // 2. For each byte byte in input: + for (let i = 0; i < input.length; i++) { + const byte = input[i] + + // 1. If byte is not 0x25 (%), then append byte to output. + if (byte !== 0x25) { + output.push(byte) + + // 2. Otherwise, if byte is 0x25 (%) and the next two bytes + // after byte in input are not in the ranges + // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F), + // and 0x61 (a) to 0x66 (f), all inclusive, append byte + // to output. + } else if ( + byte === 0x25 && + !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2])) + ) { + output.push(0x25) + + // 3. Otherwise: + } else { + // 1. Let bytePoint be the two bytes after byte in input, + // decoded, and then interpreted as hexadecimal number. + const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]) + const bytePoint = Number.parseInt(nextTwoBytes, 16) + + // 2. Append a byte whose value is bytePoint to output. + output.push(bytePoint) + + // 3. Skip the next two bytes in input. + i += 2 + } + } + + // 3. Return output. + return Uint8Array.from(output) +} + +// https://mimesniff.spec.whatwg.org/#parse-a-mime-type +/** @param {string} input */ +function parseMIMEType (input) { + // 1. Remove any leading and trailing HTTP whitespace + // from input. + input = removeHTTPWhitespace(input, true, true) + + // 2. Let position be a position variable for input, + // initially pointing at the start of input. + const position = { position: 0 } + + // 3. Let type be the result of collecting a sequence + // of code points that are not U+002F (/) from + // input, given position. + const type = collectASequenceOfCodePointsFast( + '/', + input, + position + ) + + // 4. If type is the empty string or does not solely + // contain HTTP token code points, then return failure. + // https://mimesniff.spec.whatwg.org/#http-token-code-point + if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) { + return 'failure' + } + + // 5. If position is past the end of input, then return + // failure + if (position.position > input.length) { + return 'failure' + } + + // 6. Advance position by 1. (This skips past U+002F (/).) + position.position++ + + // 7. Let subtype be the result of collecting a sequence of + // code points that are not U+003B (;) from input, given + // position. + let subtype = collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 8. Remove any trailing HTTP whitespace from subtype. + subtype = removeHTTPWhitespace(subtype, false, true) + + // 9. If subtype is the empty string or does not solely + // contain HTTP token code points, then return failure. 
+ if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) { + return 'failure' + } + + const typeLowercase = type.toLowerCase() + const subtypeLowercase = subtype.toLowerCase() + + // 10. Let mimeType be a new MIME type record whose type + // is type, in ASCII lowercase, and subtype is subtype, + // in ASCII lowercase. + // https://mimesniff.spec.whatwg.org/#mime-type + const mimeType = { + type: typeLowercase, + subtype: subtypeLowercase, + /** @type {Map} */ + parameters: new Map(), + // https://mimesniff.spec.whatwg.org/#mime-type-essence + essence: `${typeLowercase}/${subtypeLowercase}` + } + + // 11. While position is not past the end of input: + while (position.position < input.length) { + // 1. Advance position by 1. (This skips past U+003B (;).) + position.position++ + + // 2. Collect a sequence of code points that are HTTP + // whitespace from input given position. + collectASequenceOfCodePoints( + // https://fetch.spec.whatwg.org/#http-whitespace + char => HTTP_WHITESPACE_REGEX.test(char), + input, + position + ) + + // 3. Let parameterName be the result of collecting a + // sequence of code points that are not U+003B (;) + // or U+003D (=) from input, given position. + let parameterName = collectASequenceOfCodePoints( + (char) => char !== ';' && char !== '=', + input, + position + ) + + // 4. Set parameterName to parameterName, in ASCII + // lowercase. + parameterName = parameterName.toLowerCase() + + // 5. If position is not past the end of input, then: + if (position.position < input.length) { + // 1. If the code point at position within input is + // U+003B (;), then continue. + if (input[position.position] === ';') { + continue + } + + // 2. Advance position by 1. (This skips past U+003D (=).) + position.position++ + } + + // 6. If position is past the end of input, then break. + if (position.position > input.length) { + break + } + + // 7. Let parameterValue be null. + let parameterValue = null + + // 8. If the code point at position within input is + // U+0022 ("), then: + if (input[position.position] === '"') { + // 1. Set parameterValue to the result of collecting + // an HTTP quoted string from input, given position + // and the extract-value flag. + parameterValue = collectAnHTTPQuotedString(input, position, true) + + // 2. Collect a sequence of code points that are not + // U+003B (;) from input, given position. + collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 9. Otherwise: + } else { + // 1. Set parameterValue to the result of collecting + // a sequence of code points that are not U+003B (;) + // from input, given position. + parameterValue = collectASequenceOfCodePointsFast( + ';', + input, + position + ) + + // 2. Remove any trailing HTTP whitespace from parameterValue. + parameterValue = removeHTTPWhitespace(parameterValue, false, true) + + // 3. If parameterValue is the empty string, then continue. + if (parameterValue.length === 0) { + continue + } + } + + // 10. If all of the following are true + // - parameterName is not the empty string + // - parameterName solely contains HTTP token code points + // - parameterValue solely contains HTTP quoted-string token code points + // - mimeType’s parameters[parameterName] does not exist + // then set mimeType’s parameters[parameterName] to parameterValue. 
+ if ( + parameterName.length !== 0 && + HTTP_TOKEN_CODEPOINTS.test(parameterName) && + (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) && + !mimeType.parameters.has(parameterName) + ) { + mimeType.parameters.set(parameterName, parameterValue) + } + } + + // 12. Return mimeType. + return mimeType +} + +// https://infra.spec.whatwg.org/#forgiving-base64-decode +/** @param {string} data */ +function forgivingBase64 (data) { + // 1. Remove all ASCII whitespace from data. + data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line + + // 2. If data’s code point length divides by 4 leaving + // no remainder, then: + if (data.length % 4 === 0) { + // 1. If data ends with one or two U+003D (=) code points, + // then remove them from data. + data = data.replace(/=?=$/, '') + } + + // 3. If data’s code point length divides by 4 leaving + // a remainder of 1, then return failure. + if (data.length % 4 === 1) { + return 'failure' + } + + // 4. If data contains a code point that is not one of + // U+002B (+) + // U+002F (/) + // ASCII alphanumeric + // then return failure. + if (/[^+/0-9A-Za-z]/.test(data)) { + return 'failure' + } + + const binary = atob(data) + const bytes = new Uint8Array(binary.length) + + for (let byte = 0; byte < binary.length; byte++) { + bytes[byte] = binary.charCodeAt(byte) + } + + return bytes +} + +// https://fetch.spec.whatwg.org/#collect-an-http-quoted-string +// tests: https://fetch.spec.whatwg.org/#example-http-quoted-string +/** + * @param {string} input + * @param {{ position: number }} position + * @param {boolean?} extractValue + */ +function collectAnHTTPQuotedString (input, position, extractValue) { + // 1. Let positionStart be position. + const positionStart = position.position + + // 2. Let value be the empty string. + let value = '' + + // 3. Assert: the code point at position within input + // is U+0022 ("). + assert(input[position.position] === '"') + + // 4. Advance position by 1. + position.position++ + + // 5. While true: + while (true) { + // 1. Append the result of collecting a sequence of code points + // that are not U+0022 (") or U+005C (\) from input, given + // position, to value. + value += collectASequenceOfCodePoints( + (char) => char !== '"' && char !== '\\', + input, + position + ) + + // 2. If position is past the end of input, then break. + if (position.position >= input.length) { + break + } + + // 3. Let quoteOrBackslash be the code point at position within + // input. + const quoteOrBackslash = input[position.position] + + // 4. Advance position by 1. + position.position++ + + // 5. If quoteOrBackslash is U+005C (\), then: + if (quoteOrBackslash === '\\') { + // 1. If position is past the end of input, then append + // U+005C (\) to value and break. + if (position.position >= input.length) { + value += '\\' + break + } + + // 2. Append the code point at position within input to value. + value += input[position.position] + + // 3. Advance position by 1. + position.position++ + + // 6. Otherwise: + } else { + // 1. Assert: quoteOrBackslash is U+0022 ("). + assert(quoteOrBackslash === '"') + + // 2. Break. + break + } + } + + // 6. If the extract-value flag is set, then return value. + if (extractValue) { + return value + } + + // 7. Return the code points from positionStart to position, + // inclusive, within input. 
+ return input.slice(positionStart, position.position) +} + +/** + * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type + */ +function serializeAMimeType (mimeType) { + assert(mimeType !== 'failure') + const { parameters, essence } = mimeType + + // 1. Let serialization be the concatenation of mimeType’s + // type, U+002F (/), and mimeType’s subtype. + let serialization = essence + + // 2. For each name → value of mimeType’s parameters: + for (let [name, value] of parameters.entries()) { + // 1. Append U+003B (;) to serialization. + serialization += ';' + + // 2. Append name to serialization. + serialization += name + + // 3. Append U+003D (=) to serialization. + serialization += '=' + + // 4. If value does not solely contain HTTP token code + // points or value is the empty string, then: + if (!HTTP_TOKEN_CODEPOINTS.test(value)) { + // 1. Precede each occurence of U+0022 (") or + // U+005C (\) in value with U+005C (\). + value = value.replace(/(\\|")/g, '\\$1') + + // 2. Prepend U+0022 (") to value. + value = '"' + value + + // 3. Append U+0022 (") to value. + value += '"' + } + + // 5. Append value to serialization. + serialization += value + } + + // 3. Return serialization. + return serialization +} + +/** + * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} char + */ +function isHTTPWhiteSpace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === ' ' +} + +/** + * @see https://fetch.spec.whatwg.org/#http-whitespace + * @param {string} str + */ +function removeHTTPWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 + + if (leading) { + for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++); + } + + if (trailing) { + for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--); + } + + return str.slice(lead, trail + 1) +} + +/** + * @see https://infra.spec.whatwg.org/#ascii-whitespace + * @param {string} char + */ +function isASCIIWhitespace (char) { + return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' ' +} + +/** + * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace + */ +function removeASCIIWhitespace (str, leading = true, trailing = true) { + let lead = 0 + let trail = str.length - 1 + + if (leading) { + for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++); + } + + if (trailing) { + for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--); + } + + return str.slice(lead, trail + 1) +} + +module.exports = { + dataURLProcessor, + URLSerializer, + collectASequenceOfCodePoints, + collectASequenceOfCodePointsFast, + stringPercentDecode, + parseMIMEType, + collectAnHTTPQuotedString, + serializeAMimeType +} + + +/***/ }), + +/***/ 3041: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { Blob, File: NativeFile } = __nccwpck_require__(181) +const { types } = __nccwpck_require__(9023) +const { kState } = __nccwpck_require__(9710) +const { isBlobLike } = __nccwpck_require__(5523) +const { webidl } = __nccwpck_require__(4222) +const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(4322) +const { kEnumerableProperty } = __nccwpck_require__(3440) +const encoder = new TextEncoder() + +class File extends Blob { + constructor (fileBits, fileName, options = {}) { + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. 
When the File() + // constructor is invoked, user agents must run the following steps: + webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' }) + + fileBits = webidl.converters['sequence'](fileBits) + fileName = webidl.converters.USVString(fileName) + options = webidl.converters.FilePropertyBag(options) + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. + // Note: Blob handles this for us + + // 2. Let n be the fileName argument to the constructor. + const n = fileName + + // 3. Process FilePropertyBag dictionary argument by running the following + // substeps: + + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // 2. Convert every character in t to ASCII lowercase. + let t = options.type + let d + + // eslint-disable-next-line no-labels + substep: { + if (t) { + t = parseMIMEType(t) + + if (t === 'failure') { + t = '' + // eslint-disable-next-line no-labels + break substep + } + + t = serializeAMimeType(t).toLowerCase() + } + + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + d = options.lastModified + } + + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. + // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. + + super(processBlobParts(fileBits, options), { type: t }) + this[kState] = { + name: n, + lastModified: d, + type: t + } + } + + get name () { + webidl.brandCheck(this, File) + + return this[kState].name + } + + get lastModified () { + webidl.brandCheck(this, File) + + return this[kState].lastModified + } + + get type () { + webidl.brandCheck(this, File) + + return this[kState].type + } +} + +class FileLike { + constructor (blobLike, fileName, options = {}) { + // TODO: argument idl type check + + // The File constructor is invoked with two or three parameters, depending + // on whether the optional dictionary parameter is used. When the File() + // constructor is invoked, user agents must run the following steps: + + // 1. Let bytes be the result of processing blob parts given fileBits and + // options. + + // 2. Let n be the fileName argument to the constructor. + const n = fileName + + // 3. Process FilePropertyBag dictionary argument by running the following + // substeps: + + // 1. If the type member is provided and is not the empty string, let t + // be set to the type dictionary member. If t contains any characters + // outside the range U+0020 to U+007E, then set t to the empty string + // and return from these substeps. + // TODO + const t = options.type + + // 2. Convert every character in t to ASCII lowercase. + // TODO + + // 3. If the lastModified member is provided, let d be set to the + // lastModified dictionary member. If it is not provided, set d to the + // current date and time represented as the number of milliseconds since + // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). + const d = options.lastModified ?? Date.now() + + // 4. Return a new File object F such that: + // F refers to the bytes byte sequence. 
+ // F.size is set to the number of total bytes in bytes. + // F.name is set to n. + // F.type is set to t. + // F.lastModified is set to d. + + this[kState] = { + blobLike, + name: n, + type: t, + lastModified: d + } + } + + stream (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.stream(...args) + } + + arrayBuffer (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.arrayBuffer(...args) + } + + slice (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.slice(...args) + } + + text (...args) { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.text(...args) + } + + get size () { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.size + } + + get type () { + webidl.brandCheck(this, FileLike) + + return this[kState].blobLike.type + } + + get name () { + webidl.brandCheck(this, FileLike) + + return this[kState].name + } + + get lastModified () { + webidl.brandCheck(this, FileLike) + + return this[kState].lastModified + } + + get [Symbol.toStringTag] () { + return 'File' + } +} + +Object.defineProperties(File.prototype, { + [Symbol.toStringTag]: { + value: 'File', + configurable: true + }, + name: kEnumerableProperty, + lastModified: kEnumerableProperty +}) + +webidl.converters.Blob = webidl.interfaceConverter(Blob) + +webidl.converters.BlobPart = function (V, opts) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } + + if ( + ArrayBuffer.isView(V) || + types.isAnyArrayBuffer(V) + ) { + return webidl.converters.BufferSource(V, opts) + } + } + + return webidl.converters.USVString(V, opts) +} + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.BlobPart +) + +// https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag +webidl.converters.FilePropertyBag = webidl.dictionaryConverter([ + { + key: 'lastModified', + converter: webidl.converters['long long'], + get defaultValue () { + return Date.now() + } + }, + { + key: 'type', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'endings', + converter: (value) => { + value = webidl.converters.DOMString(value) + value = value.toLowerCase() + + if (value !== 'native') { + value = 'transparent' + } + + return value + }, + defaultValue: 'transparent' + } +]) + +/** + * @see https://www.w3.org/TR/FileAPI/#process-blob-parts + * @param {(NodeJS.TypedArray|Blob|string)[]} parts + * @param {{ type: string, endings: string }} options + */ +function processBlobParts (parts, options) { + // 1. Let bytes be an empty sequence of bytes. + /** @type {NodeJS.TypedArray[]} */ + const bytes = [] + + // 2. For each element in parts: + for (const element of parts) { + // 1. If element is a USVString, run the following substeps: + if (typeof element === 'string') { + // 1. Let s be element. + let s = element + + // 2. If the endings member of options is "native", set s + // to the result of converting line endings to native + // of element. + if (options.endings === 'native') { + s = convertLineEndingsNative(s) + } + + // 3. Append the result of UTF-8 encoding s to bytes. + bytes.push(encoder.encode(s)) + } else if ( + types.isAnyArrayBuffer(element) || + types.isTypedArray(element) + ) { + // 2. If element is a BufferSource, get a copy of the + // bytes held by the buffer source, and append those + // bytes to bytes. 
+ if (!element.buffer) { // ArrayBuffer + bytes.push(new Uint8Array(element)) + } else { + bytes.push( + new Uint8Array(element.buffer, element.byteOffset, element.byteLength) + ) + } + } else if (isBlobLike(element)) { + // 3. If element is a Blob, append the bytes it represents + // to bytes. + bytes.push(element) + } + } + + // 3. Return bytes. + return bytes +} + +/** + * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native + * @param {string} s + */ +function convertLineEndingsNative (s) { + // 1. Let native line ending be be the code point U+000A LF. + let nativeLineEnding = '\n' + + // 2. If the underlying platform’s conventions are to + // represent newlines as a carriage return and line feed + // sequence, set native line ending to the code point + // U+000D CR followed by the code point U+000A LF. + if (process.platform === 'win32') { + nativeLineEnding = '\r\n' + } + + return s.replace(/\r?\n/g, nativeLineEnding) +} + +// If this function is moved to ./util.js, some tools (such as +// rollup) will warn about circular dependencies. See: +// https://github.com/nodejs/undici/issues/1629 +function isFileLike (object) { + return ( + (NativeFile && object instanceof NativeFile) || + object instanceof File || ( + object && + (typeof object.stream === 'function' || + typeof object.arrayBuffer === 'function') && + object[Symbol.toStringTag] === 'File' + ) + ) +} + +module.exports = { File, FileLike, isFileLike } + + +/***/ }), + +/***/ 3073: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { isBlobLike, toUSVString, makeIterator } = __nccwpck_require__(5523) +const { kState } = __nccwpck_require__(9710) +const { File: UndiciFile, FileLike, isFileLike } = __nccwpck_require__(3041) +const { webidl } = __nccwpck_require__(4222) +const { Blob, File: NativeFile } = __nccwpck_require__(181) + +/** @type {globalThis['File']} */ +const File = NativeFile ?? UndiciFile + +// https://xhr.spec.whatwg.org/#formdata +class FormData { + constructor (form) { + if (form !== undefined) { + throw webidl.errors.conversionFailed({ + prefix: 'FormData constructor', + argument: 'Argument 1', + types: ['undefined'] + }) + } + + this[kState] = [] + } + + append (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'" + ) + } + + // 1. Let value be value if given; otherwise blobValue. + + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? webidl.converters.USVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with + // name, value, and filename if given. + const entry = makeEntry(name, value, filename) + + // 3. Append entry to this’s entry list. + this[kState].push(entry) + } + + delete (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' }) + + name = webidl.converters.USVString(name) + + // The delete(name) method steps are to remove all entries whose name + // is name from this’s entry list. 
+ this[kState] = this[kState].filter(entry => entry.name !== name) + } + + get (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' }) + + name = webidl.converters.USVString(name) + + // 1. If there is no entry whose name is name in this’s entry list, + // then return null. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx === -1) { + return null + } + + // 2. Return the value of the first entry whose name is name from + // this’s entry list. + return this[kState][idx].value + } + + getAll (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' }) + + name = webidl.converters.USVString(name) + + // 1. If there is no entry whose name is name in this’s entry list, + // then return the empty list. + // 2. Return the values of all entries whose name is name, in order, + // from this’s entry list. + return this[kState] + .filter((entry) => entry.name === name) + .map((entry) => entry.value) + } + + has (name) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' }) + + name = webidl.converters.USVString(name) + + // The has(name) method steps are to return true if there is an entry + // whose name is name in this’s entry list; otherwise false. + return this[kState].findIndex((entry) => entry.name === name) !== -1 + } + + set (name, value, filename = undefined) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' }) + + if (arguments.length === 3 && !isBlobLike(value)) { + throw new TypeError( + "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'" + ) + } + + // The set(name, value) and set(name, blobValue, filename) method steps + // are: + + // 1. Let value be value if given; otherwise blobValue. + + name = webidl.converters.USVString(name) + value = isBlobLike(value) + ? webidl.converters.Blob(value, { strict: false }) + : webidl.converters.USVString(value) + filename = arguments.length === 3 + ? toUSVString(filename) + : undefined + + // 2. Let entry be the result of creating an entry with name, value, and + // filename if given. + const entry = makeEntry(name, value, filename) + + // 3. If there are entries in this’s entry list whose name is name, then + // replace the first such entry with entry and remove the others. + const idx = this[kState].findIndex((entry) => entry.name === name) + if (idx !== -1) { + this[kState] = [ + ...this[kState].slice(0, idx), + entry, + ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name) + ] + } else { + // 4. Otherwise, append entry to this’s entry list. 
+ this[kState].push(entry) + } + } + + entries () { + webidl.brandCheck(this, FormData) + + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key+value' + ) + } + + keys () { + webidl.brandCheck(this, FormData) + + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'key' + ) + } + + values () { + webidl.brandCheck(this, FormData) + + return makeIterator( + () => this[kState].map(pair => [pair.name, pair.value]), + 'FormData', + 'value' + ) + } + + /** + * @param {(value: string, key: string, self: FormData) => void} callbackFn + * @param {unknown} thisArg + */ + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, FormData) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' }) + + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'." + ) + } + + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) + } + } +} + +FormData.prototype[Symbol.iterator] = FormData.prototype.entries + +Object.defineProperties(FormData.prototype, { + [Symbol.toStringTag]: { + value: 'FormData', + configurable: true + } +}) + +/** + * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry + * @param {string} name + * @param {string|Blob} value + * @param {?string} filename + * @returns + */ +function makeEntry (name, value, filename) { + // 1. Set name to the result of converting name into a scalar value string. + // "To convert a string into a scalar value string, replace any surrogates + // with U+FFFD." + // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end + name = Buffer.from(name).toString('utf8') + + // 2. If value is a string, then set value to the result of converting + // value into a scalar value string. + if (typeof value === 'string') { + value = Buffer.from(value).toString('utf8') + } else { + // 3. Otherwise: + + // 1. If value is not a File object, then set value to a new File object, + // representing the same bytes, whose name attribute value is "blob" + if (!isFileLike(value)) { + value = value instanceof Blob + ? new File([value], 'blob', { type: value.type }) + : new FileLike(value, 'blob', { type: value.type }) + } + + // 2. If filename is given, then set value to a new File object, + // representing the same bytes, whose name attribute is filename. + if (filename !== undefined) { + /** @type {FilePropertyBag} */ + const options = { + type: value.type, + lastModified: value.lastModified + } + + value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile + ? new File([value], filename, options) + : new FileLike(value, filename, options) + } + } + + // 4. Return an entry whose name is name and whose value is value. + return { name, value } +} + +module.exports = { FormData } + + +/***/ }), + +/***/ 5628: +/***/ ((module) => { + +"use strict"; + + +// In case of breaking changes, increase the version +// number to avoid conflicts. 
+const globalOrigin = Symbol.for('undici.globalOrigin.1') + +function getGlobalOrigin () { + return globalThis[globalOrigin] +} + +function setGlobalOrigin (newOrigin) { + if (newOrigin === undefined) { + Object.defineProperty(globalThis, globalOrigin, { + value: undefined, + writable: true, + enumerable: false, + configurable: false + }) + + return + } + + const parsedURL = new URL(newOrigin) + + if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') { + throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`) + } + + Object.defineProperty(globalThis, globalOrigin, { + value: parsedURL, + writable: true, + enumerable: false, + configurable: false + }) +} + +module.exports = { + getGlobalOrigin, + setGlobalOrigin +} + + +/***/ }), + +/***/ 6349: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// https://github.com/Ethan-Arrowood/undici-fetch + + + +const { kHeadersList, kConstruct } = __nccwpck_require__(6443) +const { kGuard } = __nccwpck_require__(9710) +const { kEnumerableProperty } = __nccwpck_require__(3440) +const { + makeIterator, + isValidHeaderName, + isValidHeaderValue +} = __nccwpck_require__(5523) +const { webidl } = __nccwpck_require__(4222) +const assert = __nccwpck_require__(2613) + +const kHeadersMap = Symbol('headers map') +const kHeadersSortedMap = Symbol('headers map sorted') + +/** + * @param {number} code + */ +function isHTTPWhiteSpaceCharCode (code) { + return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize + * @param {string} potentialValue + */ +function headerValueNormalize (potentialValue) { + // To normalize a byte sequence potentialValue, remove + // any leading and trailing HTTP whitespace bytes from + // potentialValue. + let i = 0; let j = potentialValue.length + + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j + while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i + + return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) +} + +function fill (headers, object) { + // To fill a Headers object headers with a given object object, run these steps: + + // 1. If object is a sequence, then for each header in object: + // Note: webidl conversion to array has already been done. + if (Array.isArray(object)) { + for (let i = 0; i < object.length; ++i) { + const header = object[i] + // 1. If header does not contain exactly two items, then throw a TypeError. + if (header.length !== 2) { + throw webidl.errors.exception({ + header: 'Headers constructor', + message: `expected name/value pair to be length 2, found ${header.length}.` + }) + } + + // 2. Append (header’s first item, header’s second item) to headers. + appendHeader(headers, header[0], header[1]) + } + } else if (typeof object === 'object' && object !== null) { + // Note: null should throw + + // 2. Otherwise, object is a record, then for each key → value in object, + // append (key, value) to headers + const keys = Object.keys(object) + for (let i = 0; i < keys.length; ++i) { + appendHeader(headers, keys[i], object[keys[i]]) + } + } else { + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence>', 'record'] + }) + } +} + +/** + * @see https://fetch.spec.whatwg.org/#concept-headers-append + */ +function appendHeader (headers, name, value) { + // 1. Normalize value. 
+ value = headerValueNormalize(value) + + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.append', + value, + type: 'header value' + }) + } + + // 3. If headers’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if headers’s guard is "request" and name is a + // forbidden header name, return. + // Note: undici does not implement forbidden header names + if (headers[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (headers[kGuard] === 'request-no-cors') { + // 5. Otherwise, if headers’s guard is "request-no-cors": + // TODO + } + + // 6. Otherwise, if headers’s guard is "response" and name is a + // forbidden response-header name, return. + + // 7. Append (name, value) to headers’s header list. + return headers[kHeadersList].append(name, value) + + // 8. If headers’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from headers +} + +class HeadersList { + /** @type {[string, string][]|null} */ + cookies = null + + constructor (init) { + if (init instanceof HeadersList) { + this[kHeadersMap] = new Map(init[kHeadersMap]) + this[kHeadersSortedMap] = init[kHeadersSortedMap] + this.cookies = init.cookies === null ? null : [...init.cookies] + } else { + this[kHeadersMap] = new Map(init) + this[kHeadersSortedMap] = null + } + } + + // https://fetch.spec.whatwg.org/#header-list-contains + contains (name) { + // A header list list contains a header name name if list + // contains a header whose name is a byte-case-insensitive + // match for name. + name = name.toLowerCase() + + return this[kHeadersMap].has(name) + } + + clear () { + this[kHeadersMap].clear() + this[kHeadersSortedMap] = null + this.cookies = null + } + + // https://fetch.spec.whatwg.org/#concept-header-list-append + append (name, value) { + this[kHeadersSortedMap] = null + + // 1. If list contains name, then set name to the first such + // header’s name. + const lowercaseName = name.toLowerCase() + const exists = this[kHeadersMap].get(lowercaseName) + + // 2. Append (name, value) to list. + if (exists) { + const delimiter = lowercaseName === 'cookie' ? '; ' : ', ' + this[kHeadersMap].set(lowercaseName, { + name: exists.name, + value: `${exists.value}${delimiter}${value}` + }) + } else { + this[kHeadersMap].set(lowercaseName, { name, value }) + } + + if (lowercaseName === 'set-cookie') { + this.cookies ??= [] + this.cookies.push(value) + } + } + + // https://fetch.spec.whatwg.org/#concept-header-list-set + set (name, value) { + this[kHeadersSortedMap] = null + const lowercaseName = name.toLowerCase() + + if (lowercaseName === 'set-cookie') { + this.cookies = [value] + } + + // 1. If list contains name, then set the value of + // the first such header to value and remove the + // others. + // 2. Otherwise, append header (name, value) to list. 
+ this[kHeadersMap].set(lowercaseName, { name, value }) + } + + // https://fetch.spec.whatwg.org/#concept-header-list-delete + delete (name) { + this[kHeadersSortedMap] = null + + name = name.toLowerCase() + + if (name === 'set-cookie') { + this.cookies = null + } + + this[kHeadersMap].delete(name) + } + + // https://fetch.spec.whatwg.org/#concept-header-list-get + get (name) { + const value = this[kHeadersMap].get(name.toLowerCase()) + + // 1. If list does not contain name, then return null. + // 2. Return the values of all headers in list whose name + // is a byte-case-insensitive match for name, + // separated from each other by 0x2C 0x20, in order. + return value === undefined ? null : value.value + } + + * [Symbol.iterator] () { + // use the lowercased name + for (const [name, { value }] of this[kHeadersMap]) { + yield [name, value] + } + } + + get entries () { + const headers = {} + + if (this[kHeadersMap].size) { + for (const { name, value } of this[kHeadersMap].values()) { + headers[name] = value + } + } + + return headers + } +} + +// https://fetch.spec.whatwg.org/#headers-class +class Headers { + constructor (init = undefined) { + if (init === kConstruct) { + return + } + this[kHeadersList] = new HeadersList() + + // The new Headers(init) constructor steps are: + + // 1. Set this’s guard to "none". + this[kGuard] = 'none' + + // 2. If init is given, then fill this with init. + if (init !== undefined) { + init = webidl.converters.HeadersInit(init) + fill(this, init) + } + } + + // https://fetch.spec.whatwg.org/#dom-headers-append + append (name, value) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' }) + + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) + + return appendHeader(this, name, value) + } + + // https://fetch.spec.whatwg.org/#dom-headers-delete + delete (name) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' }) + + name = webidl.converters.ByteString(name) + + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.delete', + value: name, + type: 'header name' + }) + } + + // 2. If this’s guard is "immutable", then throw a TypeError. + // 3. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 4. Otherwise, if this’s guard is "request-no-cors", name + // is not a no-CORS-safelisted request-header name, and + // name is not a privileged no-CORS request-header name, + // return. + // 5. Otherwise, if this’s guard is "response" and name is + // a forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } + + // 6. If this’s header list does not contain name, then + // return. + if (!this[kHeadersList].contains(name)) { + return + } + + // 7. Delete name from this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this. + this[kHeadersList].delete(name) + } + + // https://fetch.spec.whatwg.org/#dom-headers-get + get (name) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' }) + + name = webidl.converters.ByteString(name) + + // 1. If name is not a header name, then throw a TypeError. 
+ if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.get', + value: name, + type: 'header name' + }) + } + + // 2. Return the result of getting name from this’s header + // list. + return this[kHeadersList].get(name) + } + + // https://fetch.spec.whatwg.org/#dom-headers-has + has (name) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' }) + + name = webidl.converters.ByteString(name) + + // 1. If name is not a header name, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.has', + value: name, + type: 'header name' + }) + } + + // 2. Return true if this’s header list contains name; + // otherwise false. + return this[kHeadersList].contains(name) + } + + // https://fetch.spec.whatwg.org/#dom-headers-set + set (name, value) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' }) + + name = webidl.converters.ByteString(name) + value = webidl.converters.ByteString(value) + + // 1. Normalize value. + value = headerValueNormalize(value) + + // 2. If name is not a header name or value is not a + // header value, then throw a TypeError. + if (!isValidHeaderName(name)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value: name, + type: 'header name' + }) + } else if (!isValidHeaderValue(value)) { + throw webidl.errors.invalidArgument({ + prefix: 'Headers.set', + value, + type: 'header value' + }) + } + + // 3. If this’s guard is "immutable", then throw a TypeError. + // 4. Otherwise, if this’s guard is "request" and name is a + // forbidden header name, return. + // 5. Otherwise, if this’s guard is "request-no-cors" and + // name/value is not a no-CORS-safelisted request-header, + // return. + // 6. Otherwise, if this’s guard is "response" and name is a + // forbidden response-header name, return. + // Note: undici does not implement forbidden header names + if (this[kGuard] === 'immutable') { + throw new TypeError('immutable') + } else if (this[kGuard] === 'request-no-cors') { + // TODO + } + + // 7. Set (name, value) in this’s header list. + // 8. If this’s guard is "request-no-cors", then remove + // privileged no-CORS request headers from this + this[kHeadersList].set(name, value) + } + + // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie + getSetCookie () { + webidl.brandCheck(this, Headers) + + // 1. If this’s header list does not contain `Set-Cookie`, then return « ». + // 2. Return the values of all headers in this’s header list whose name is + // a byte-case-insensitive match for `Set-Cookie`, in order. + + const list = this[kHeadersList].cookies + + if (list) { + return [...list] + } + + return [] + } + + // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine + get [kHeadersSortedMap] () { + if (this[kHeadersList][kHeadersSortedMap]) { + return this[kHeadersList][kHeadersSortedMap] + } + + // 1. Let headers be an empty list of headers with the key being the name + // and value the value. + const headers = [] + + // 2. Let names be the result of convert header names to a sorted-lowercase + // set with all the names of the headers in list. + const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1) + const cookies = this[kHeadersList].cookies + + // 3. For each name of names: + for (let i = 0; i < names.length; ++i) { + const [name, value] = names[i] + // 1. 
If name is `set-cookie`, then: + if (name === 'set-cookie') { + // 1. Let values be a list of all values of headers in list whose name + // is a byte-case-insensitive match for name, in order. + + // 2. For each value of values: + // 1. Append (name, value) to headers. + for (let j = 0; j < cookies.length; ++j) { + headers.push([name, cookies[j]]) + } + } else { + // 2. Otherwise: + + // 1. Let value be the result of getting name from list. + + // 2. Assert: value is non-null. + assert(value !== null) + + // 3. Append (name, value) to headers. + headers.push([name, value]) + } + } + + this[kHeadersList][kHeadersSortedMap] = headers + + // 4. Return headers. + return headers + } + + keys () { + webidl.brandCheck(this, Headers) + + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key') + } + + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key' + ) + } + + values () { + webidl.brandCheck(this, Headers) + + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'value') + } + + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'value' + ) + } + + entries () { + webidl.brandCheck(this, Headers) + + if (this[kGuard] === 'immutable') { + const value = this[kHeadersSortedMap] + return makeIterator(() => value, 'Headers', + 'key+value') + } + + return makeIterator( + () => [...this[kHeadersSortedMap].values()], + 'Headers', + 'key+value' + ) + } + + /** + * @param {(value: string, key: string, self: Headers) => void} callbackFn + * @param {unknown} thisArg + */ + forEach (callbackFn, thisArg = globalThis) { + webidl.brandCheck(this, Headers) + + webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' }) + + if (typeof callbackFn !== 'function') { + throw new TypeError( + "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'." 
+ ) + } + + for (const [key, value] of this) { + callbackFn.apply(thisArg, [value, key, this]) + } + } + + [Symbol.for('nodejs.util.inspect.custom')] () { + webidl.brandCheck(this, Headers) + + return this[kHeadersList] + } +} + +Headers.prototype[Symbol.iterator] = Headers.prototype.entries + +Object.defineProperties(Headers.prototype, { + append: kEnumerableProperty, + delete: kEnumerableProperty, + get: kEnumerableProperty, + has: kEnumerableProperty, + set: kEnumerableProperty, + getSetCookie: kEnumerableProperty, + keys: kEnumerableProperty, + values: kEnumerableProperty, + entries: kEnumerableProperty, + forEach: kEnumerableProperty, + [Symbol.iterator]: { enumerable: false }, + [Symbol.toStringTag]: { + value: 'Headers', + configurable: true + } +}) + +webidl.converters.HeadersInit = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (V[Symbol.iterator]) { + return webidl.converters['sequence>'](V) + } + + return webidl.converters['record'](V) + } + + throw webidl.errors.conversionFailed({ + prefix: 'Headers constructor', + argument: 'Argument 1', + types: ['sequence>', 'record'] + }) +} + +module.exports = { + fill, + Headers, + HeadersList +} + + +/***/ }), + +/***/ 2315: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +// https://github.com/Ethan-Arrowood/undici-fetch + + + +const { + Response, + makeNetworkError, + makeAppropriateNetworkError, + filterResponse, + makeResponse +} = __nccwpck_require__(8676) +const { Headers } = __nccwpck_require__(6349) +const { Request, makeRequest } = __nccwpck_require__(5194) +const zlib = __nccwpck_require__(3106) +const { + bytesMatch, + makePolicyContainer, + clonePolicyContainer, + requestBadPort, + TAOCheck, + appendRequestOriginHeader, + responseLocationURL, + requestCurrentURL, + setRequestReferrerPolicyOnRedirect, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + createOpaqueTimingInfo, + appendFetchMetadata, + corsCheck, + crossOriginResourcePolicyCheck, + determineRequestsReferrer, + coarsenedSharedCurrentTime, + createDeferredPromise, + isBlobLike, + sameOrigin, + isCancelled, + isAborted, + isErrorLike, + fullyReadBody, + readableStreamClose, + isomorphicEncode, + urlIsLocal, + urlIsHttpHttpsScheme, + urlHasHttpsScheme +} = __nccwpck_require__(5523) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(9710) +const assert = __nccwpck_require__(2613) +const { safelyExtractBody } = __nccwpck_require__(8923) +const { + redirectStatusSet, + nullBodyStatus, + safeMethodsSet, + requestBodyHeader, + subresourceSet, + DOMException +} = __nccwpck_require__(7326) +const { kHeadersList } = __nccwpck_require__(6443) +const EE = __nccwpck_require__(4434) +const { Readable, pipeline } = __nccwpck_require__(2203) +const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = __nccwpck_require__(3440) +const { dataURLProcessor, serializeAMimeType } = __nccwpck_require__(4322) +const { TransformStream } = __nccwpck_require__(3774) +const { getGlobalDispatcher } = __nccwpck_require__(2581) +const { webidl } = __nccwpck_require__(4222) +const { STATUS_CODES } = __nccwpck_require__(8611) +const GET_OR_HEAD = ['GET', 'HEAD'] + +/** @type {import('buffer').resolveObjectURL} */ +let resolveObjectURL +let ReadableStream = globalThis.ReadableStream + +class Fetch extends EE { + constructor (dispatcher) { + super() + + this.dispatcher = dispatcher + this.connection = null + this.dump = false + this.state = 'ongoing' + // 2 terminated listeners get added per request, + // but only 1 
gets removed. If there are 20 redirects, + // 21 listeners will be added. + // See https://github.com/nodejs/undici/issues/1711 + // TODO (fix): Find and fix root cause for leaked listener. + this.setMaxListeners(21) + } + + terminate (reason) { + if (this.state !== 'ongoing') { + return + } + + this.state = 'terminated' + this.connection?.destroy(reason) + this.emit('terminated', reason) + } + + // https://fetch.spec.whatwg.org/#fetch-controller-abort + abort (error) { + if (this.state !== 'ongoing') { + return + } + + // 1. Set controller’s state to "aborted". + this.state = 'aborted' + + // 2. Let fallbackError be an "AbortError" DOMException. + // 3. Set error to fallbackError if it is not given. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } + + // 4. Let serializedError be StructuredSerialize(error). + // If that threw an exception, catch it, and let + // serializedError be StructuredSerialize(fallbackError). + + // 5. Set controller’s serialized abort reason to serializedError. + this.serializedAbortReason = error + + this.connection?.destroy(error) + this.emit('terminated', error) + } +} + +// https://fetch.spec.whatwg.org/#fetch-method +function fetch (input, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' }) + + // 1. Let p be a new promise. + const p = createDeferredPromise() + + // 2. Let requestObject be the result of invoking the initial value of + // Request as constructor with input and init as arguments. If this throws + // an exception, reject p with it and return p. + let requestObject + + try { + requestObject = new Request(input, init) + } catch (e) { + p.reject(e) + return p.promise + } + + // 3. Let request be requestObject’s request. + const request = requestObject[kState] + + // 4. If requestObject’s signal’s aborted flag is set, then: + if (requestObject.signal.aborted) { + // 1. Abort the fetch() call with p, request, null, and + // requestObject’s signal’s abort reason. + abortFetch(p, request, null, requestObject.signal.reason) + + // 2. Return p. + return p.promise + } + + // 5. Let globalObject be request’s client’s global object. + const globalObject = request.client.globalObject + + // 6. If globalObject is a ServiceWorkerGlobalScope object, then set + // request’s service-workers mode to "none". + if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') { + request.serviceWorkers = 'none' + } + + // 7. Let responseObject be null. + let responseObject = null + + // 8. Let relevantRealm be this’s relevant Realm. + const relevantRealm = null + + // 9. Let locallyAborted be false. + let locallyAborted = false + + // 10. Let controller be null. + let controller = null + + // 11. Add the following abort steps to requestObject’s signal: + addAbortListener( + requestObject.signal, + () => { + // 1. Set locallyAborted to true. + locallyAborted = true + + // 2. Assert: controller is non-null. + assert(controller != null) + + // 3. Abort controller with requestObject’s signal’s abort reason. + controller.abort(requestObject.signal.reason) + + // 4. Abort the fetch() call with p, request, responseObject, + // and requestObject’s signal’s abort reason. + abortFetch(p, request, responseObject, requestObject.signal.reason) + } + ) + + // 12. Let handleFetchDone given response response be to finalize and + // report timing with response, globalObject, and "fetch". + const handleFetchDone = (response) => + finalizeAndReportTiming(response, 'fetch') + + // 13. 
Set controller to the result of calling fetch given request, + // with processResponseEndOfBody set to handleFetchDone, and processResponse + // given response being these substeps: + + const processResponse = (response) => { + // 1. If locallyAborted is true, terminate these substeps. + if (locallyAborted) { + return Promise.resolve() + } + + // 2. If response’s aborted flag is set, then: + if (response.aborted) { + // 1. Let deserializedError be the result of deserialize a serialized + // abort reason given controller’s serialized abort reason and + // relevantRealm. + + // 2. Abort the fetch() call with p, request, responseObject, and + // deserializedError. + + abortFetch(p, request, responseObject, controller.serializedAbortReason) + return Promise.resolve() + } + + // 3. If response is a network error, then reject p with a TypeError + // and terminate these substeps. + if (response.type === 'error') { + p.reject( + Object.assign(new TypeError('fetch failed'), { cause: response.error }) + ) + return Promise.resolve() + } + + // 4. Set responseObject to the result of creating a Response object, + // given response, "immutable", and relevantRealm. + responseObject = new Response() + responseObject[kState] = response + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = response.headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + + // 5. Resolve p with responseObject. + p.resolve(responseObject) + } + + controller = fetching({ + request, + processResponseEndOfBody: handleFetchDone, + processResponse, + dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici + }) + + // 14. Return p. + return p.promise +} + +// https://fetch.spec.whatwg.org/#finalize-and-report-timing +function finalizeAndReportTiming (response, initiatorType = 'other') { + // 1. If response is an aborted network error, then return. + if (response.type === 'error' && response.aborted) { + return + } + + // 2. If response’s URL list is null or empty, then return. + if (!response.urlList?.length) { + return + } + + // 3. Let originalURL be response’s URL list[0]. + const originalURL = response.urlList[0] + + // 4. Let timingInfo be response’s timing info. + let timingInfo = response.timingInfo + + // 5. Let cacheState be response’s cache state. + let cacheState = response.cacheState + + // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return. + if (!urlIsHttpHttpsScheme(originalURL)) { + return + } + + // 7. If timingInfo is null, then return. + if (timingInfo === null) { + return + } + + // 8. If response’s timing allow passed flag is not set, then: + if (!response.timingAllowPassed) { + // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. + timingInfo = createOpaqueTimingInfo({ + startTime: timingInfo.startTime + }) + + // 2. Set cacheState to the empty string. + cacheState = '' + } + + // 9. Set timingInfo’s end time to the coarsened shared current time + // given global’s relevant settings object’s cross-origin isolated + // capability. + // TODO: given global’s relevant settings object’s cross-origin isolated + // capability? + timingInfo.endTime = coarsenedSharedCurrentTime() + + // 10. Set response’s timing info to timingInfo. + response.timingInfo = timingInfo + + // 11. Mark resource timing for timingInfo, originalURL, initiatorType, + // global, and cacheState. 
+ markResourceTiming( + timingInfo, + originalURL, + initiatorType, + globalThis, + cacheState + ) +} + +// https://w3c.github.io/resource-timing/#dfn-mark-resource-timing +function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) { + if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) { + performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState) + } +} + +// https://fetch.spec.whatwg.org/#abort-fetch +function abortFetch (p, request, responseObject, error) { + // Note: AbortSignal.reason was added in node v17.2.0 + // which would give us an undefined error to reject with. + // Remove this once node v16 is no longer supported. + if (!error) { + error = new DOMException('The operation was aborted.', 'AbortError') + } + + // 1. Reject promise with error. + p.reject(error) + + // 2. If request’s body is not null and is readable, then cancel request’s + // body with error. + if (request.body != null && isReadable(request.body?.stream)) { + request.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) + } + + // 3. If responseObject is null, then return. + if (responseObject == null) { + return + } + + // 4. Let response be responseObject’s response. + const response = responseObject[kState] + + // 5. If response’s body is not null and is readable, then error response’s + // body with error. + if (response.body != null && isReadable(response.body?.stream)) { + response.body.stream.cancel(error).catch((err) => { + if (err.code === 'ERR_INVALID_STATE') { + // Node bug? + return + } + throw err + }) + } +} + +// https://fetch.spec.whatwg.org/#fetching +function fetching ({ + request, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseEndOfBody, + processResponseConsumeBody, + useParallelQueue = false, + dispatcher // undici +}) { + // 1. Let taskDestination be null. + let taskDestination = null + + // 2. Let crossOriginIsolatedCapability be false. + let crossOriginIsolatedCapability = false + + // 3. If request’s client is non-null, then: + if (request.client != null) { + // 1. Set taskDestination to request’s client’s global object. + taskDestination = request.client.globalObject + + // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin + // isolated capability. + crossOriginIsolatedCapability = + request.client.crossOriginIsolatedCapability + } + + // 4. If useParallelQueue is true, then set taskDestination to the result of + // starting a new parallel queue. + // TODO + + // 5. Let timingInfo be a new fetch timing info whose start time and + // post-redirect start time are the coarsened shared current time given + // crossOriginIsolatedCapability. + const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability) + const timingInfo = createOpaqueTimingInfo({ + startTime: currenTime + }) + + // 6. Let fetchParams be a new fetch params whose + // request is request, + // timing info is timingInfo, + // process request body chunk length is processRequestBodyChunkLength, + // process request end-of-body is processRequestEndOfBody, + // process response is processResponse, + // process response consume body is processResponseConsumeBody, + // process response end-of-body is processResponseEndOfBody, + // task destination is taskDestination, + // and cross-origin isolated capability is crossOriginIsolatedCapability. 
+ const fetchParams = { + controller: new Fetch(dispatcher), + request, + timingInfo, + processRequestBodyChunkLength, + processRequestEndOfBody, + processResponse, + processResponseConsumeBody, + processResponseEndOfBody, + taskDestination, + crossOriginIsolatedCapability + } + + // 7. If request’s body is a byte sequence, then set request’s body to + // request’s body as a body. + // NOTE: Since fetching is only called from fetch, body should already be + // extracted. + assert(!request.body || request.body.stream) + + // 8. If request’s window is "client", then set request’s window to request’s + // client, if request’s client’s global object is a Window object; otherwise + // "no-window". + if (request.window === 'client') { + // TODO: What if request.client is null? + request.window = + request.client?.globalObject?.constructor?.name === 'Window' + ? request.client + : 'no-window' + } + + // 9. If request’s origin is "client", then set request’s origin to request’s + // client’s origin. + if (request.origin === 'client') { + // TODO: What if request.client is null? + request.origin = request.client?.origin + } + + // 10. If all of the following conditions are true: + // TODO + + // 11. If request’s policy container is "client", then: + if (request.policyContainer === 'client') { + // 1. If request’s client is non-null, then set request’s policy + // container to a clone of request’s client’s policy container. [HTML] + if (request.client != null) { + request.policyContainer = clonePolicyContainer( + request.client.policyContainer + ) + } else { + // 2. Otherwise, set request’s policy container to a new policy + // container. + request.policyContainer = makePolicyContainer() + } + } + + // 12. If request’s header list does not contain `Accept`, then: + if (!request.headersList.contains('accept')) { + // 1. Let value be `*/*`. + const value = '*/*' + + // 2. A user agent should set value to the first matching statement, if + // any, switching on request’s destination: + // "document" + // "frame" + // "iframe" + // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8` + // "image" + // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5` + // "style" + // `text/css,*/*;q=0.1` + // TODO + + // 3. Append `Accept`/value to request’s header list. + request.headersList.append('accept', value) + } + + // 13. If request’s header list does not contain `Accept-Language`, then + // user agents should append `Accept-Language`/an appropriate value to + // request’s header list. + if (!request.headersList.contains('accept-language')) { + request.headersList.append('accept-language', '*') + } + + // 14. If request’s priority is null, then use request’s initiator and + // destination appropriately in setting request’s priority to a + // user-agent-defined object. + if (request.priority === null) { + // TODO + } + + // 15. If request is a subresource request, then: + if (subresourceSet.has(request.destination)) { + // TODO + } + + // 16. Run main fetch given fetchParams. + mainFetch(fetchParams) + .catch(err => { + fetchParams.controller.terminate(err) + }) + + // 17. Return fetchParam's controller + return fetchParams.controller +} + +// https://fetch.spec.whatwg.org/#concept-main-fetch +async function mainFetch (fetchParams, recursive = false) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let response be null. + let response = null + + // 3. 
If request’s local-URLs-only flag is set and request’s current URL is + // not local, then set response to a network error. + if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) { + response = makeNetworkError('local URLs only') + } + + // 4. Run report Content Security Policy violations for request. + // TODO + + // 5. Upgrade request to a potentially trustworthy URL, if appropriate. + tryUpgradeRequestToAPotentiallyTrustworthyURL(request) + + // 6. If should request be blocked due to a bad port, should fetching request + // be blocked as mixed content, or should request be blocked by Content + // Security Policy returns blocked, then set response to a network error. + if (requestBadPort(request) === 'blocked') { + response = makeNetworkError('bad port') + } + // TODO: should fetching request be blocked as mixed content? + // TODO: should request be blocked by Content Security Policy? + + // 7. If request’s referrer policy is the empty string, then set request’s + // referrer policy to request’s policy container’s referrer policy. + if (request.referrerPolicy === '') { + request.referrerPolicy = request.policyContainer.referrerPolicy + } + + // 8. If request’s referrer is not "no-referrer", then set request’s + // referrer to the result of invoking determine request’s referrer. + if (request.referrer !== 'no-referrer') { + request.referrer = determineRequestsReferrer(request) + } + + // 9. Set request’s current URL’s scheme to "https" if all of the following + // conditions are true: + // - request’s current URL’s scheme is "http" + // - request’s current URL’s host is a domain + // - Matching request’s current URL’s host per Known HSTS Host Domain Name + // Matching results in either a superdomain match with an asserted + // includeSubDomains directive or a congruent match (with or without an + // asserted includeSubDomains directive). [HSTS] + // TODO + + // 10. If recursive is false, then run the remaining steps in parallel. + // TODO + + // 11. If response is null, then set response to the result of running + // the steps corresponding to the first matching statement: + if (response === null) { + response = await (async () => { + const currentURL = requestCurrentURL(request) + + if ( + // - request’s current URL’s origin is same origin with request’s origin, + // and request’s response tainting is "basic" + (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') || + // request’s current URL’s scheme is "data" + (currentURL.protocol === 'data:') || + // - request’s mode is "navigate" or "websocket" + (request.mode === 'navigate' || request.mode === 'websocket') + ) { + // 1. Set request’s response tainting to "basic". + request.responseTainting = 'basic' + + // 2. Return the result of running scheme fetch given fetchParams. + return await schemeFetch(fetchParams) + } + + // request’s mode is "same-origin" + if (request.mode === 'same-origin') { + // 1. Return a network error. + return makeNetworkError('request mode cannot be "same-origin"') + } + + // request’s mode is "no-cors" + if (request.mode === 'no-cors') { + // 1. If request’s redirect mode is not "follow", then return a network + // error. + if (request.redirect !== 'follow') { + return makeNetworkError( + 'redirect mode cannot be "follow" for "no-cors" request' + ) + } + + // 2. Set request’s response tainting to "opaque". + request.responseTainting = 'opaque' + + // 3. Return the result of running scheme fetch given fetchParams. 
+ return await schemeFetch(fetchParams) + } + + // request’s current URL’s scheme is not an HTTP(S) scheme + if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) { + // Return a network error. + return makeNetworkError('URL scheme must be a HTTP(S) scheme') + } + + // - request’s use-CORS-preflight flag is set + // - request’s unsafe-request flag is set and either request’s method is + // not a CORS-safelisted method or CORS-unsafe request-header names with + // request’s header list is not empty + // 1. Set request’s response tainting to "cors". + // 2. Let corsWithPreflightResponse be the result of running HTTP fetch + // given fetchParams and true. + // 3. If corsWithPreflightResponse is a network error, then clear cache + // entries using request. + // 4. Return corsWithPreflightResponse. + // TODO + + // Otherwise + // 1. Set request’s response tainting to "cors". + request.responseTainting = 'cors' + + // 2. Return the result of running HTTP fetch given fetchParams. + return await httpFetch(fetchParams) + })() + } + + // 12. If recursive is true, then return response. + if (recursive) { + return response + } + + // 13. If response is not a network error and response is not a filtered + // response, then: + if (response.status !== 0 && !response.internalResponse) { + // If request’s response tainting is "cors", then: + if (request.responseTainting === 'cors') { + // 1. Let headerNames be the result of extracting header list values + // given `Access-Control-Expose-Headers` and response’s header list. + // TODO + // 2. If request’s credentials mode is not "include" and headerNames + // contains `*`, then set response’s CORS-exposed header-name list to + // all unique header names in response’s header list. + // TODO + // 3. Otherwise, if headerNames is not null or failure, then set + // response’s CORS-exposed header-name list to headerNames. + // TODO + } + + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (request.responseTainting === 'basic') { + response = filterResponse(response, 'basic') + } else if (request.responseTainting === 'cors') { + response = filterResponse(response, 'cors') + } else if (request.responseTainting === 'opaque') { + response = filterResponse(response, 'opaque') + } else { + assert(false) + } + } + + // 14. Let internalResponse be response, if response is a network error, + // and response’s internal response otherwise. + let internalResponse = + response.status === 0 ? response : response.internalResponse + + // 15. If internalResponse’s URL list is empty, then set it to a clone of + // request’s URL list. + if (internalResponse.urlList.length === 0) { + internalResponse.urlList.push(...request.urlList) + } + + // 16. If request’s timing allow failed flag is unset, then set + // internalResponse’s timing allow passed flag. + if (!request.timingAllowFailed) { + response.timingAllowPassed = true + } + + // 17. If response is not a network error and any of the following returns + // blocked + // - should internalResponse to request be blocked as mixed content + // - should internalResponse to request be blocked by Content Security Policy + // - should internalResponse to request be blocked due to its MIME type + // - should internalResponse to request be blocked due to nosniff + // TODO + + // 18. 
If response’s type is "opaque", internalResponse’s status is 206, + // internalResponse’s range-requested flag is set, and request’s header + // list does not contain `Range`, then set response and internalResponse + // to a network error. + if ( + response.type === 'opaque' && + internalResponse.status === 206 && + internalResponse.rangeRequested && + !request.headers.contains('range') + ) { + response = internalResponse = makeNetworkError() + } + + // 19. If response is not a network error and either request’s method is + // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status, + // set internalResponse’s body to null and disregard any enqueuing toward + // it (if any). + if ( + response.status !== 0 && + (request.method === 'HEAD' || + request.method === 'CONNECT' || + nullBodyStatus.includes(internalResponse.status)) + ) { + internalResponse.body = null + fetchParams.controller.dump = true + } + + // 20. If request’s integrity metadata is not the empty string, then: + if (request.integrity) { + // 1. Let processBodyError be this step: run fetch finale given fetchParams + // and a network error. + const processBodyError = (reason) => + fetchFinale(fetchParams, makeNetworkError(reason)) + + // 2. If request’s response tainting is "opaque", or response’s body is null, + // then run processBodyError and abort these steps. + if (request.responseTainting === 'opaque' || response.body == null) { + processBodyError(response.error) + return + } + + // 3. Let processBody given bytes be these steps: + const processBody = (bytes) => { + // 1. If bytes do not match request’s integrity metadata, + // then run processBodyError and abort these steps. [SRI] + if (!bytesMatch(bytes, request.integrity)) { + processBodyError('integrity mismatch') + return + } + + // 2. Set response’s body to bytes as a body. + response.body = safelyExtractBody(bytes)[0] + + // 3. Run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) + } + + // 4. Fully read response’s body given processBody and processBodyError. + await fullyReadBody(response.body, processBody, processBodyError) + } else { + // 21. Otherwise, run fetch finale given fetchParams and response. + fetchFinale(fetchParams, response) + } +} + +// https://fetch.spec.whatwg.org/#concept-scheme-fetch +// given a fetch params fetchParams +function schemeFetch (fetchParams) { + // Note: since the connection is destroyed on redirect, which sets fetchParams to a + // cancelled state, we do not want this condition to trigger *unless* there have been + // no redirects. See https://github.com/nodejs/undici/issues/1776 + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { + return Promise.resolve(makeAppropriateNetworkError(fetchParams)) + } + + // 2. Let request be fetchParams’s request. + const { request } = fetchParams + + const { protocol: scheme } = requestCurrentURL(request) + + // 3. Switch on request’s current URL’s scheme and run the associated steps: + switch (scheme) { + case 'about:': { + // If request’s current URL’s path is the string "blank", then return a new response + // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) », + // and body is the empty byte sequence as a body. + + // Otherwise, return a network error. 
+ return Promise.resolve(makeNetworkError('about scheme is not supported')) + } + case 'blob:': { + if (!resolveObjectURL) { + resolveObjectURL = (__nccwpck_require__(181).resolveObjectURL) + } + + // 1. Let blobURLEntry be request’s current URL’s blob URL entry. + const blobURLEntry = requestCurrentURL(request) + + // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 + // Buffer.resolveObjectURL does not ignore URL queries. + if (blobURLEntry.search.length !== 0) { + return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.')) + } + + const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()) + + // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s + // object is not a Blob object, then return a network error. + if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) { + return Promise.resolve(makeNetworkError('invalid method')) + } + + // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object. + const bodyWithType = safelyExtractBody(blobURLEntryObject) + + // 4. Let body be bodyWithType’s body. + const body = bodyWithType[0] + + // 5. Let length be body’s length, serialized and isomorphic encoded. + const length = isomorphicEncode(`${body.length}`) + + // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence. + const type = bodyWithType[1] ?? '' + + // 7. Return a new response whose status message is `OK`, header list is + // « (`Content-Length`, length), (`Content-Type`, type) », and body is body. + const response = makeResponse({ + statusText: 'OK', + headersList: [ + ['content-length', { name: 'Content-Length', value: length }], + ['content-type', { name: 'Content-Type', value: type }] + ] + }) + + response.body = body + + return Promise.resolve(response) + } + case 'data:': { + // 1. Let dataURLStruct be the result of running the + // data: URL processor on request’s current URL. + const currentURL = requestCurrentURL(request) + const dataURLStruct = dataURLProcessor(currentURL) + + // 2. If dataURLStruct is failure, then return a + // network error. + if (dataURLStruct === 'failure') { + return Promise.resolve(makeNetworkError('failed to fetch the data URL')) + } + + // 3. Let mimeType be dataURLStruct’s MIME type, serialized. + const mimeType = serializeAMimeType(dataURLStruct.mimeType) + + // 4. Return a response whose status message is `OK`, + // header list is « (`Content-Type`, mimeType) », + // and body is dataURLStruct’s body as a body. + return Promise.resolve(makeResponse({ + statusText: 'OK', + headersList: [ + ['content-type', { name: 'Content-Type', value: mimeType }] + ], + body: safelyExtractBody(dataURLStruct.body)[0] + })) + } + case 'file:': { + // For now, unfortunate as it is, file URLs are left as an exercise for the reader. + // When in doubt, return a network error. + return Promise.resolve(makeNetworkError('not implemented... yet...')) + } + case 'http:': + case 'https:': { + // Return the result of running HTTP fetch given fetchParams. + + return httpFetch(fetchParams) + .catch((err) => makeNetworkError(err)) + } + default: { + return Promise.resolve(makeNetworkError('unknown scheme')) + } + } +} + +// https://fetch.spec.whatwg.org/#finalize-response +function finalizeResponse (fetchParams, response) { + // 1. Set fetchParams’s request’s done flag. 
+ fetchParams.request.done = true + + // 2, If fetchParams’s process response done is not null, then queue a fetch + // task to run fetchParams’s process response done given response, with + // fetchParams’s task destination. + if (fetchParams.processResponseDone != null) { + queueMicrotask(() => fetchParams.processResponseDone(response)) + } +} + +// https://fetch.spec.whatwg.org/#fetch-finale +function fetchFinale (fetchParams, response) { + // 1. If response is a network error, then: + if (response.type === 'error') { + // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ». + response.urlList = [fetchParams.request.urlList[0]] + + // 2. Set response’s timing info to the result of creating an opaque timing + // info for fetchParams’s timing info. + response.timingInfo = createOpaqueTimingInfo({ + startTime: fetchParams.timingInfo.startTime + }) + } + + // 2. Let processResponseEndOfBody be the following steps: + const processResponseEndOfBody = () => { + // 1. Set fetchParams’s request’s done flag. + fetchParams.request.done = true + + // If fetchParams’s process response end-of-body is not null, + // then queue a fetch task to run fetchParams’s process response + // end-of-body given response with fetchParams’s task destination. + if (fetchParams.processResponseEndOfBody != null) { + queueMicrotask(() => fetchParams.processResponseEndOfBody(response)) + } + } + + // 3. If fetchParams’s process response is non-null, then queue a fetch task + // to run fetchParams’s process response given response, with fetchParams’s + // task destination. + if (fetchParams.processResponse != null) { + queueMicrotask(() => fetchParams.processResponse(response)) + } + + // 4. If response’s body is null, then run processResponseEndOfBody. + if (response.body == null) { + processResponseEndOfBody() + } else { + // 5. Otherwise: + + // 1. Let transformStream be a new a TransformStream. + + // 2. Let identityTransformAlgorithm be an algorithm which, given chunk, + // enqueues chunk in transformStream. + const identityTransformAlgorithm = (chunk, controller) => { + controller.enqueue(chunk) + } + + // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm + // and flushAlgorithm set to processResponseEndOfBody. + const transformStream = new TransformStream({ + start () {}, + transform: identityTransformAlgorithm, + flush: processResponseEndOfBody + }, { + size () { + return 1 + } + }, { + size () { + return 1 + } + }) + + // 4. Set response’s body to the result of piping response’s body through transformStream. + response.body = { stream: response.body.stream.pipeThrough(transformStream) } + } + + // 6. If fetchParams’s process response consume body is non-null, then: + if (fetchParams.processResponseConsumeBody != null) { + // 1. Let processBody given nullOrBytes be this step: run fetchParams’s + // process response consume body given response and nullOrBytes. + const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes) + + // 2. Let processBodyError be this step: run fetchParams’s process + // response consume body given response and failure. + const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure) + + // 3. If response’s body is null, then queue a fetch task to run processBody + // given null, with fetchParams’s task destination. + if (response.body == null) { + queueMicrotask(() => processBody(null)) + } else { + // 4. 
Otherwise, fully read response’s body given processBody, processBodyError, + // and fetchParams’s task destination. + return fullyReadBody(response.body, processBody, processBodyError) + } + return Promise.resolve() + } +} + +// https://fetch.spec.whatwg.org/#http-fetch +async function httpFetch (fetchParams) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let response be null. + let response = null + + // 3. Let actualResponse be null. + let actualResponse = null + + // 4. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo + + // 5. If request’s service-workers mode is "all", then: + if (request.serviceWorkers === 'all') { + // TODO + } + + // 6. If response is null, then: + if (response === null) { + // 1. If makeCORSPreflight is true and one of these conditions is true: + // TODO + + // 2. If request’s redirect mode is "follow", then set request’s + // service-workers mode to "none". + if (request.redirect === 'follow') { + request.serviceWorkers = 'none' + } + + // 3. Set response and actualResponse to the result of running + // HTTP-network-or-cache fetch given fetchParams. + actualResponse = response = await httpNetworkOrCacheFetch(fetchParams) + + // 4. If request’s response tainting is "cors" and a CORS check + // for request and response returns failure, then return a network error. + if ( + request.responseTainting === 'cors' && + corsCheck(request, response) === 'failure' + ) { + return makeNetworkError('cors failure') + } + + // 5. If the TAO check for request and response returns failure, then set + // request’s timing allow failed flag. + if (TAOCheck(request, response) === 'failure') { + request.timingAllowFailed = true + } + } + + // 7. If either request’s response tainting or response’s type + // is "opaque", and the cross-origin resource policy check with + // request’s origin, request’s client, request’s destination, + // and actualResponse returns blocked, then return a network error. + if ( + (request.responseTainting === 'opaque' || response.type === 'opaque') && + crossOriginResourcePolicyCheck( + request.origin, + request.client, + request.destination, + actualResponse + ) === 'blocked' + ) { + return makeNetworkError('blocked') + } + + // 8. If actualResponse’s status is a redirect status, then: + if (redirectStatusSet.has(actualResponse.status)) { + // 1. If actualResponse’s status is not 303, request’s body is not null, + // and the connection uses HTTP/2, then user agents may, and are even + // encouraged to, transmit an RST_STREAM frame. + // See, https://github.com/whatwg/fetch/issues/1288 + if (request.redirect !== 'manual') { + fetchParams.controller.connection.destroy() + } + + // 2. Switch on request’s redirect mode: + if (request.redirect === 'error') { + // Set response to a network error. + response = makeNetworkError('unexpected redirect') + } else if (request.redirect === 'manual') { + // Set response to an opaque-redirect filtered response whose internal + // response is actualResponse. + // NOTE(spec): On the web this would return an `opaqueredirect` response, + // but that doesn't make sense server side. + // See https://github.com/nodejs/undici/issues/1193. + response = actualResponse + } else if (request.redirect === 'follow') { + // Set response to the result of running HTTP-redirect fetch given + // fetchParams and response. + response = await httpRedirectFetch(fetchParams, response) + } else { + assert(false) + } + } + + // 9. 
Set response’s timing info to timingInfo. + response.timingInfo = timingInfo + + // 10. Return response. + return response +} + +// https://fetch.spec.whatwg.org/#http-redirect-fetch +function httpRedirectFetch (fetchParams, response) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let actualResponse be response, if response is not a filtered response, + // and response’s internal response otherwise. + const actualResponse = response.internalResponse + ? response.internalResponse + : response + + // 3. Let locationURL be actualResponse’s location URL given request’s current + // URL’s fragment. + let locationURL + + try { + locationURL = responseLocationURL( + actualResponse, + requestCurrentURL(request).hash + ) + + // 4. If locationURL is null, then return response. + if (locationURL == null) { + return response + } + } catch (err) { + // 5. If locationURL is failure, then return a network error. + return Promise.resolve(makeNetworkError(err)) + } + + // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network + // error. + if (!urlIsHttpHttpsScheme(locationURL)) { + return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme')) + } + + // 7. If request’s redirect count is 20, then return a network error. + if (request.redirectCount === 20) { + return Promise.resolve(makeNetworkError('redirect count exceeded')) + } + + // 8. Increase request’s redirect count by 1. + request.redirectCount += 1 + + // 9. If request’s mode is "cors", locationURL includes credentials, and + // request’s origin is not same origin with locationURL’s origin, then return + // a network error. + if ( + request.mode === 'cors' && + (locationURL.username || locationURL.password) && + !sameOrigin(request, locationURL) + ) { + return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')) + } + + // 10. If request’s response tainting is "cors" and locationURL includes + // credentials, then return a network error. + if ( + request.responseTainting === 'cors' && + (locationURL.username || locationURL.password) + ) { + return Promise.resolve(makeNetworkError( + 'URL cannot contain credentials for request mode "cors"' + )) + } + + // 11. If actualResponse’s status is not 303, request’s body is non-null, + // and request’s body’s source is null, then return a network error. + if ( + actualResponse.status !== 303 && + request.body != null && + request.body.source == null + ) { + return Promise.resolve(makeNetworkError()) + } + + // 12. If one of the following is true + // - actualResponse’s status is 301 or 302 and request’s method is `POST` + // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD` + if ( + ([301, 302].includes(actualResponse.status) && request.method === 'POST') || + (actualResponse.status === 303 && + !GET_OR_HEAD.includes(request.method)) + ) { + // then: + // 1. Set request’s method to `GET` and request’s body to null. + request.method = 'GET' + request.body = null + + // 2. For each headerName of request-body-header name, delete headerName from + // request’s header list. + for (const headerName of requestBodyHeader) { + request.headersList.delete(headerName) + } + } + + // 13. If request’s current URL’s origin is not same origin with locationURL’s + // origin, then for each headerName of CORS non-wildcard request-header name, + // delete headerName from request’s header list. 
+ if (!sameOrigin(requestCurrentURL(request), locationURL)) { + // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name + request.headersList.delete('authorization') + + // https://fetch.spec.whatwg.org/#authentication-entries + request.headersList.delete('proxy-authorization', true) + + // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. + request.headersList.delete('cookie') + request.headersList.delete('host') + } + + // 14. If request’s body is non-null, then set request’s body to the first return + // value of safely extracting request’s body’s source. + if (request.body != null) { + assert(request.body.source != null) + request.body = safelyExtractBody(request.body.source)[0] + } + + // 15. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo + + // 16. Set timingInfo’s redirect end time and post-redirect start time to the + // coarsened shared current time given fetchParams’s cross-origin isolated + // capability. + timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = + coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability) + + // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s + // redirect start time to timingInfo’s start time. + if (timingInfo.redirectStartTime === 0) { + timingInfo.redirectStartTime = timingInfo.startTime + } + + // 18. Append locationURL to request’s URL list. + request.urlList.push(locationURL) + + // 19. Invoke set request’s referrer policy on redirect on request and + // actualResponse. + setRequestReferrerPolicyOnRedirect(request, actualResponse) + + // 20. Return the result of running main fetch given fetchParams and true. + return mainFetch(fetchParams, true) +} + +// https://fetch.spec.whatwg.org/#http-network-or-cache-fetch +async function httpNetworkOrCacheFetch ( + fetchParams, + isAuthenticationFetch = false, + isNewConnectionFetch = false +) { + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let httpFetchParams be null. + let httpFetchParams = null + + // 3. Let httpRequest be null. + let httpRequest = null + + // 4. Let response be null. + let response = null + + // 5. Let storedResponse be null. + // TODO: cache + + // 6. Let httpCache be null. + const httpCache = null + + // 7. Let the revalidatingFlag be unset. + const revalidatingFlag = false + + // 8. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. If request’s window is "no-window" and request’s redirect mode is + // "error", then set httpFetchParams to fetchParams and httpRequest to + // request. + if (request.window === 'no-window' && request.redirect === 'error') { + httpFetchParams = fetchParams + httpRequest = request + } else { + // Otherwise: + + // 1. Set httpRequest to a clone of request. + httpRequest = makeRequest(request) + + // 2. Set httpFetchParams to a copy of fetchParams. + httpFetchParams = { ...fetchParams } + + // 3. Set httpFetchParams’s request to httpRequest. + httpFetchParams.request = httpRequest + } + + // 3. Let includeCredentials be true if one of + const includeCredentials = + request.credentials === 'include' || + (request.credentials === 'same-origin' && + request.responseTainting === 'basic') + + // 4. Let contentLength be httpRequest’s body’s length, if httpRequest’s + // body is non-null; otherwise null. + const contentLength = httpRequest.body ? httpRequest.body.length : null + + // 5. Let contentLengthHeaderValue be null. 
+ let contentLengthHeaderValue = null + + // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or + // `PUT`, then set contentLengthHeaderValue to `0`. + if ( + httpRequest.body == null && + ['POST', 'PUT'].includes(httpRequest.method) + ) { + contentLengthHeaderValue = '0' + } + + // 7. If contentLength is non-null, then set contentLengthHeaderValue to + // contentLength, serialized and isomorphic encoded. + if (contentLength != null) { + contentLengthHeaderValue = isomorphicEncode(`${contentLength}`) + } + + // 8. If contentLengthHeaderValue is non-null, then append + // `Content-Length`/contentLengthHeaderValue to httpRequest’s header + // list. + if (contentLengthHeaderValue != null) { + httpRequest.headersList.append('content-length', contentLengthHeaderValue) + } + + // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`, + // contentLengthHeaderValue) to httpRequest’s header list. + + // 10. If contentLength is non-null and httpRequest’s keepalive is true, + // then: + if (contentLength != null && httpRequest.keepalive) { + // NOTE: keepalive is a noop outside of browser context. + } + + // 11. If httpRequest’s referrer is a URL, then append + // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded, + // to httpRequest’s header list. + if (httpRequest.referrer instanceof URL) { + httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href)) + } + + // 12. Append a request `Origin` header for httpRequest. + appendRequestOriginHeader(httpRequest) + + // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA] + appendFetchMetadata(httpRequest) + + // 14. If httpRequest’s header list does not contain `User-Agent`, then + // user agents should append `User-Agent`/default `User-Agent` value to + // httpRequest’s header list. + if (!httpRequest.headersList.contains('user-agent')) { + httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node') + } + + // 15. If httpRequest’s cache mode is "default" and httpRequest’s header + // list contains `If-Modified-Since`, `If-None-Match`, + // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set + // httpRequest’s cache mode to "no-store". + if ( + httpRequest.cache === 'default' && + (httpRequest.headersList.contains('if-modified-since') || + httpRequest.headersList.contains('if-none-match') || + httpRequest.headersList.contains('if-unmodified-since') || + httpRequest.headersList.contains('if-match') || + httpRequest.headersList.contains('if-range')) + ) { + httpRequest.cache = 'no-store' + } + + // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent + // no-cache cache-control header modification flag is unset, and + // httpRequest’s header list does not contain `Cache-Control`, then append + // `Cache-Control`/`max-age=0` to httpRequest’s header list. + if ( + httpRequest.cache === 'no-cache' && + !httpRequest.preventNoCacheCacheControlHeaderModification && + !httpRequest.headersList.contains('cache-control') + ) { + httpRequest.headersList.append('cache-control', 'max-age=0') + } + + // 17. If httpRequest’s cache mode is "no-store" or "reload", then: + if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') { + // 1. If httpRequest’s header list does not contain `Pragma`, then append + // `Pragma`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('pragma')) { + httpRequest.headersList.append('pragma', 'no-cache') + } + + // 2. 
If httpRequest’s header list does not contain `Cache-Control`, + // then append `Cache-Control`/`no-cache` to httpRequest’s header list. + if (!httpRequest.headersList.contains('cache-control')) { + httpRequest.headersList.append('cache-control', 'no-cache') + } + } + + // 18. If httpRequest’s header list contains `Range`, then append + // `Accept-Encoding`/`identity` to httpRequest’s header list. + if (httpRequest.headersList.contains('range')) { + httpRequest.headersList.append('accept-encoding', 'identity') + } + + // 19. Modify httpRequest’s header list per HTTP. Do not append a given + // header if httpRequest’s header list contains that header’s name. + // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129 + if (!httpRequest.headersList.contains('accept-encoding')) { + if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) { + httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate') + } else { + httpRequest.headersList.append('accept-encoding', 'gzip, deflate') + } + } + + httpRequest.headersList.delete('host') + + // 20. If includeCredentials is true, then: + if (includeCredentials) { + // 1. If the user agent is not configured to block cookies for httpRequest + // (see section 7 of [COOKIES]), then: + // TODO: credentials + // 2. If httpRequest’s header list does not contain `Authorization`, then: + // TODO: credentials + } + + // 21. If there’s a proxy-authentication entry, use it as appropriate. + // TODO: proxy-authentication + + // 22. Set httpCache to the result of determining the HTTP cache + // partition, given httpRequest. + // TODO: cache + + // 23. If httpCache is null, then set httpRequest’s cache mode to + // "no-store". + if (httpCache == null) { + httpRequest.cache = 'no-store' + } + + // 24. If httpRequest’s cache mode is neither "no-store" nor "reload", + // then: + if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') { + // TODO: cache + } + + // 9. If aborted, then return the appropriate network error for fetchParams. + // TODO + + // 10. If response is null, then: + if (response == null) { + // 1. If httpRequest’s cache mode is "only-if-cached", then return a + // network error. + if (httpRequest.mode === 'only-if-cached') { + return makeNetworkError('only if cached') + } + + // 2. Let forwardResponse be the result of running HTTP-network fetch + // given httpFetchParams, includeCredentials, and isNewConnectionFetch. + const forwardResponse = await httpNetworkFetch( + httpFetchParams, + includeCredentials, + isNewConnectionFetch + ) + + // 3. If httpRequest’s method is unsafe and forwardResponse’s status is + // in the range 200 to 399, inclusive, invalidate appropriate stored + // responses in httpCache, as per the "Invalidation" chapter of HTTP + // Caching, and set storedResponse to null. [HTTP-CACHING] + if ( + !safeMethodsSet.has(httpRequest.method) && + forwardResponse.status >= 200 && + forwardResponse.status <= 399 + ) { + // TODO: cache + } + + // 4. If the revalidatingFlag is set and forwardResponse’s status is 304, + // then: + if (revalidatingFlag && forwardResponse.status === 304) { + // TODO: cache + } + + // 5. If response is null, then: + if (response == null) { + // 1. Set response to forwardResponse. + response = forwardResponse + + // 2. Store httpRequest and forwardResponse in httpCache, as per the + // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING] + // TODO: cache + } + } + + // 11. Set response’s URL list to a clone of httpRequest’s URL list. 
+ response.urlList = [...httpRequest.urlList] + + // 12. If httpRequest’s header list contains `Range`, then set response’s + // range-requested flag. + if (httpRequest.headersList.contains('range')) { + response.rangeRequested = true + } + + // 13. Set response’s request-includes-credentials to includeCredentials. + response.requestIncludesCredentials = includeCredentials + + // 14. If response’s status is 401, httpRequest’s response tainting is not + // "cors", includeCredentials is true, and request’s window is an environment + // settings object, then: + // TODO + + // 15. If response’s status is 407, then: + if (response.status === 407) { + // 1. If request’s window is "no-window", then return a network error. + if (request.window === 'no-window') { + return makeNetworkError() + } + + // 2. ??? + + // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) + } + + // 4. Prompt the end user as appropriate in request’s window and store + // the result as a proxy-authentication entry. [HTTP-AUTH] + // TODO: Invoke some kind of callback? + + // 5. Set response to the result of running HTTP-network-or-cache fetch given + // fetchParams. + // TODO + return makeNetworkError('proxy authentication required') + } + + // 16. If all of the following are true + if ( + // response’s status is 421 + response.status === 421 && + // isNewConnectionFetch is false + !isNewConnectionFetch && + // request’s body is null, or request’s body is non-null and request’s body’s source is non-null + (request.body == null || request.body.source != null) + ) { + // then: + + // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. + if (isCancelled(fetchParams)) { + return makeAppropriateNetworkError(fetchParams) + } + + // 2. Set response to the result of running HTTP-network-or-cache + // fetch given fetchParams, isAuthenticationFetch, and true. + + // TODO (spec): The spec doesn't specify this but we need to cancel + // the active response before we can start a new one. + // https://github.com/whatwg/fetch/issues/1293 + fetchParams.controller.connection.destroy() + + response = await httpNetworkOrCacheFetch( + fetchParams, + isAuthenticationFetch, + true + ) + } + + // 17. If isAuthenticationFetch is true, then create an authentication entry + if (isAuthenticationFetch) { + // TODO + } + + // 18. Return response. + return response +} + +// https://fetch.spec.whatwg.org/#http-network-fetch +async function httpNetworkFetch ( + fetchParams, + includeCredentials = false, + forceNewConnection = false +) { + assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed) + + fetchParams.controller.connection = { + abort: null, + destroyed: false, + destroy (err) { + if (!this.destroyed) { + this.destroyed = true + this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) + } + } + } + + // 1. Let request be fetchParams’s request. + const request = fetchParams.request + + // 2. Let response be null. + let response = null + + // 3. Let timingInfo be fetchParams’s timing info. + const timingInfo = fetchParams.timingInfo + + // 4. Let httpCache be the result of determining the HTTP cache partition, + // given request. + // TODO: cache + const httpCache = null + + // 5. If httpCache is null, then set request’s cache mode to "no-store". + if (httpCache == null) { + request.cache = 'no-store' + } + + // 6. 
Let networkPartitionKey be the result of determining the network + // partition key given request. + // TODO + + // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise + // "no". + const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars + + // 8. Switch on request’s mode: + if (request.mode === 'websocket') { + // Let connection be the result of obtaining a WebSocket connection, + // given request’s current URL. + // TODO + } else { + // Let connection be the result of obtaining a connection, given + // networkPartitionKey, request’s current URL’s origin, + // includeCredentials, and forceNewConnection. + // TODO + } + + // 9. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. If connection is failure, then return a network error. + + // 2. Set timingInfo’s final connection timing info to the result of + // calling clamp and coarsen connection timing info with connection’s + // timing info, timingInfo’s post-redirect start time, and fetchParams’s + // cross-origin isolated capability. + + // 3. If connection is not an HTTP/2 connection, request’s body is non-null, + // and request’s body’s source is null, then append (`Transfer-Encoding`, + // `chunked`) to request’s header list. + + // 4. Set timingInfo’s final network-request start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated + // capability. + + // 5. Set response to the result of making an HTTP request over connection + // using request with the following caveats: + + // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS] + // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH] + + // - If request’s body is non-null, and request’s body’s source is null, + // then the user agent may have a buffer of up to 64 kibibytes and store + // a part of request’s body in that buffer. If the user agent reads from + // request’s body beyond that buffer’s size and the user agent needs to + // resend request, then instead return a network error. + + // - Set timingInfo’s final network-response start time to the coarsened + // shared current time given fetchParams’s cross-origin isolated capability, + // immediately after the user agent’s HTTP parser receives the first byte + // of the response (e.g., frame header bytes for HTTP/2 or response status + // line for HTTP/1.x). + + // - Wait until all the headers are transmitted. + + // - Any responses whose status is in the range 100 to 199, inclusive, + // and is not 101, are to be ignored, except for the purposes of setting + // timingInfo’s final network-response start time above. + + // - If request’s header list contains `Transfer-Encoding`/`chunked` and + // response is transferred via HTTP/1.0 or older, then return a network + // error. + + // - If the HTTP request results in a TLS client certificate dialog, then: + + // 1. If request’s window is an environment settings object, make the + // dialog available in request’s window. + + // 2. Otherwise, return a network error. + + // To transmit request’s body body, run these steps: + let requestBody = null + // 1. If body is null and fetchParams’s process request end-of-body is + // non-null, then queue a fetch task given fetchParams’s process request + // end-of-body and fetchParams’s task destination. + if (request.body == null && fetchParams.processRequestEndOfBody) { + queueMicrotask(() => fetchParams.processRequestEndOfBody()) + } else if (request.body != null) { + // 2. Otherwise, if body is non-null: + + // 1. 
Let processBodyChunk given bytes be these steps: + const processBodyChunk = async function * (bytes) { + // 1. If the ongoing fetch is terminated, then abort these steps. + if (isCancelled(fetchParams)) { + return + } + + // 2. Run this step in parallel: transmit bytes. + yield bytes + + // 3. If fetchParams’s process request body is non-null, then run + // fetchParams’s process request body given bytes’s length. + fetchParams.processRequestBodyChunkLength?.(bytes.byteLength) + } + + // 2. Let processEndOfBody be these steps: + const processEndOfBody = () => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return + } + + // 2. If fetchParams’s process request end-of-body is non-null, + // then run fetchParams’s process request end-of-body. + if (fetchParams.processRequestEndOfBody) { + fetchParams.processRequestEndOfBody() + } + } + + // 3. Let processBodyError given e be these steps: + const processBodyError = (e) => { + // 1. If fetchParams is canceled, then abort these steps. + if (isCancelled(fetchParams)) { + return + } + + // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller. + if (e.name === 'AbortError') { + fetchParams.controller.abort() + } else { + fetchParams.controller.terminate(e) + } + } + + // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody, + // processBodyError, and fetchParams’s task destination. + requestBody = (async function * () { + try { + for await (const bytes of request.body.stream) { + yield * processBodyChunk(bytes) + } + processEndOfBody() + } catch (err) { + processBodyError(err) + } + })() + } + + try { + // socket is only provided for websockets + const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody }) + + if (socket) { + response = makeResponse({ status, statusText, headersList, socket }) + } else { + const iterator = body[Symbol.asyncIterator]() + fetchParams.controller.next = () => iterator.next() + + response = makeResponse({ status, statusText, headersList }) + } + } catch (err) { + // 10. If aborted, then: + if (err.name === 'AbortError') { + // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame. + fetchParams.controller.connection.destroy() + + // 2. Return the appropriate network error for fetchParams. + return makeAppropriateNetworkError(fetchParams, err) + } + + return makeNetworkError(err) + } + + // 11. Let pullAlgorithm be an action that resumes the ongoing fetch + // if it is suspended. + const pullAlgorithm = () => { + fetchParams.controller.resume() + } + + // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s + // controller with reason, given reason. + const cancelAlgorithm = (reason) => { + fetchParams.controller.abort(reason) + } + + // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by + // the user agent. + // TODO + + // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object + // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent. + // TODO + + // 15. Let stream be a new ReadableStream. + // 16. Set up stream with pullAlgorithm set to pullAlgorithm, + // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to + // highWaterMark, and sizeAlgorithm set to sizeAlgorithm. 
+ if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(3774).ReadableStream) + } + + const stream = new ReadableStream( + { + async start (controller) { + fetchParams.controller.controller = controller + }, + async pull (controller) { + await pullAlgorithm(controller) + }, + async cancel (reason) { + await cancelAlgorithm(reason) + } + }, + { + highWaterMark: 0, + size () { + return 1 + } + } + ) + + // 17. Run these steps, but abort when the ongoing fetch is terminated: + + // 1. Set response’s body to a new body whose stream is stream. + response.body = { stream } + + // 2. If response is not a network error and request’s cache mode is + // not "no-store", then update response in httpCache for request. + // TODO + + // 3. If includeCredentials is true and the user agent is not configured + // to block cookies for request (see section 7 of [COOKIES]), then run the + // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on + // the value of each header whose name is a byte-case-insensitive match for + // `Set-Cookie` in response’s header list, if any, and request’s current URL. + // TODO + + // 18. If aborted, then: + // TODO + + // 19. Run these steps in parallel: + + // 1. Run these steps, but abort when fetchParams is canceled: + fetchParams.controller.on('terminated', onAborted) + fetchParams.controller.resume = async () => { + // 1. While true + while (true) { + // 1-3. See onData... + + // 4. Set bytes to the result of handling content codings given + // codings and bytes. + let bytes + let isFailure + try { + const { done, value } = await fetchParams.controller.next() + + if (isAborted(fetchParams)) { + break + } + + bytes = done ? undefined : value + } catch (err) { + if (fetchParams.controller.ended && !timingInfo.encodedBodySize) { + // zlib doesn't like empty streams. + bytes = undefined + } else { + bytes = err + + // err may be propagated from the result of calling readablestream.cancel, + // which might not be an error. https://github.com/nodejs/undici/issues/2009 + isFailure = true + } + } + + if (bytes === undefined) { + // 2. Otherwise, if the bytes transmission for response’s message + // body is done normally and stream is readable, then close + // stream, finalize response for fetchParams and response, and + // abort these in-parallel steps. + readableStreamClose(fetchParams.controller.controller) + + finalizeResponse(fetchParams, response) + + return + } + + // 5. Increase timingInfo’s decoded body size by bytes’s length. + timingInfo.decodedBodySize += bytes?.byteLength ?? 0 + + // 6. If bytes is failure, then terminate fetchParams’s controller. + if (isFailure) { + fetchParams.controller.terminate(bytes) + return + } + + // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes + // into stream. + fetchParams.controller.controller.enqueue(new Uint8Array(bytes)) + + // 8. If stream is errored, then terminate the ongoing fetch. + if (isErrored(stream)) { + fetchParams.controller.terminate() + return + } + + // 9. If stream doesn’t need more data ask the user agent to suspend + // the ongoing fetch. + if (!fetchParams.controller.controller.desiredSize) { + return + } + } + } + + // 2. If aborted, then: + function onAborted (reason) { + // 2. If fetchParams is aborted, then: + if (isAborted(fetchParams)) { + // 1. Set response’s aborted flag. + response.aborted = true + + // 2. 
If stream is readable, then error stream with the result of + // deserialize a serialized abort reason given fetchParams’s + // controller’s serialized abort reason and an + // implementation-defined realm. + if (isReadable(stream)) { + fetchParams.controller.controller.error( + fetchParams.controller.serializedAbortReason + ) + } + } else { + // 3. Otherwise, if stream is readable, error stream with a TypeError. + if (isReadable(stream)) { + fetchParams.controller.controller.error(new TypeError('terminated', { + cause: isErrorLike(reason) ? reason : undefined + })) + } + } + + // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame. + // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so. + fetchParams.controller.connection.destroy() + } + + // 20. Return response. + return response + + async function dispatch ({ body }) { + const url = requestCurrentURL(request) + /** @type {import('../..').Agent} */ + const agent = fetchParams.controller.dispatcher + + return new Promise((resolve, reject) => agent.dispatch( + { + path: url.pathname + url.search, + origin: url.origin, + method: request.method, + body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body, + headers: request.headersList.entries, + maxRedirections: 0, + upgrade: request.mode === 'websocket' ? 'websocket' : undefined + }, + { + body: null, + abort: null, + + onConnect (abort) { + // TODO (fix): Do we need connection here? + const { connection } = fetchParams.controller + + if (connection.destroyed) { + abort(new DOMException('The operation was aborted.', 'AbortError')) + } else { + fetchParams.controller.on('terminated', abort) + this.abort = connection.abort = abort + } + }, + + onHeaders (status, headersList, resume, statusText) { + if (status < 200) { + return + } + + let codings = [] + let location = '' + + const headers = new Headers() + + // For H2, the headers are a plain JS object + // We distinguish between them and iterate accordingly + if (Array.isArray(headersList)) { + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." + codings = val.toLowerCase().split(',').map((x) => x.trim()) + } else if (key.toLowerCase() === 'location') { + location = val + } + + headers[kHeadersList].append(key, val) + } + } else { + const keys = Object.keys(headersList) + for (const key of keys) { + const val = headersList[key] + if (key.toLowerCase() === 'content-encoding') { + // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 + // "All content-coding values are case-insensitive..." 
+ codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse() + } else if (key.toLowerCase() === 'location') { + location = val + } + + headers[kHeadersList].append(key, val) + } + } + + this.body = new Readable({ read: resume }) + + const decoders = [] + + const willFollow = request.redirect === 'follow' && + location && + redirectStatusSet.has(status) + + // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding + if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) { + for (const coding of codings) { + // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2 + if (coding === 'x-gzip' || coding === 'gzip') { + decoders.push(zlib.createGunzip({ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH + })) + } else if (coding === 'deflate') { + decoders.push(zlib.createInflate()) + } else if (coding === 'br') { + decoders.push(zlib.createBrotliDecompress()) + } else { + decoders.length = 0 + break + } + } + } + + resolve({ + status, + statusText, + headersList: headers[kHeadersList], + body: decoders.length + ? pipeline(this.body, ...decoders, () => { }) + : this.body.on('error', () => {}) + }) + + return true + }, + + onData (chunk) { + if (fetchParams.controller.dump) { + return + } + + // 1. If one or more bytes have been transmitted from response’s + // message body, then: + + // 1. Let bytes be the transmitted bytes. + const bytes = chunk + + // 2. Let codings be the result of extracting header list values + // given `Content-Encoding` and response’s header list. + // See pullAlgorithm. + + // 3. Increase timingInfo’s encoded body size by bytes’s length. + timingInfo.encodedBodySize += bytes.byteLength + + // 4. See pullAlgorithm... 
+ + return this.body.push(bytes) + }, + + onComplete () { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } + + fetchParams.controller.ended = true + + this.body.push(null) + }, + + onError (error) { + if (this.abort) { + fetchParams.controller.off('terminated', this.abort) + } + + this.body?.destroy(error) + + fetchParams.controller.terminate(error) + + reject(error) + }, + + onUpgrade (status, headersList, socket) { + if (status !== 101) { + return + } + + const headers = new Headers() + + for (let n = 0; n < headersList.length; n += 2) { + const key = headersList[n + 0].toString('latin1') + const val = headersList[n + 1].toString('latin1') + + headers[kHeadersList].append(key, val) + } + + resolve({ + status, + statusText: STATUS_CODES[status], + headersList: headers[kHeadersList], + socket + }) + + return true + } + } + )) + } +} + +module.exports = { + fetch, + Fetch, + fetching, + finalizeAndReportTiming +} + + +/***/ }), + +/***/ 5194: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/* globals AbortController */ + + + +const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(8923) +const { Headers, fill: fillHeaders, HeadersList } = __nccwpck_require__(6349) +const { FinalizationRegistry } = __nccwpck_require__(3194)() +const util = __nccwpck_require__(3440) +const { + isValidHTTPToken, + sameOrigin, + normalizeMethod, + makePolicyContainer, + normalizeMethodRecord +} = __nccwpck_require__(5523) +const { + forbiddenMethodsSet, + corsSafeListedMethodsSet, + referrerPolicy, + requestRedirect, + requestMode, + requestCredentials, + requestCache, + requestDuplex +} = __nccwpck_require__(7326) +const { kEnumerableProperty } = util +const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(9710) +const { webidl } = __nccwpck_require__(4222) +const { getGlobalOrigin } = __nccwpck_require__(5628) +const { URLSerializer } = __nccwpck_require__(4322) +const { kHeadersList, kConstruct } = __nccwpck_require__(6443) +const assert = __nccwpck_require__(2613) +const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(4434) + +let TransformStream = globalThis.TransformStream + +const kAbortController = Symbol('abortController') + +const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { + signal.removeEventListener('abort', abort) +}) + +// https://fetch.spec.whatwg.org/#request-class +class Request { + // https://fetch.spec.whatwg.org/#dom-request + constructor (input, init = {}) { + if (input === kConstruct) { + return + } + + webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' }) + + input = webidl.converters.RequestInfo(input) + init = webidl.converters.RequestInit(init) + + // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object + this[kRealm] = { + settingsObject: { + baseUrl: getGlobalOrigin(), + get origin () { + return this.baseUrl?.origin + }, + policyContainer: makePolicyContainer() + } + } + + // 1. Let request be null. + let request = null + + // 2. Let fallbackMode be null. + let fallbackMode = null + + // 3. Let baseURL be this’s relevant settings object’s API base URL. + const baseUrl = this[kRealm].settingsObject.baseUrl + + // 4. Let signal be null. + let signal = null + + // 5. If input is a string, then: + if (typeof input === 'string') { + // 1. Let parsedURL be the result of parsing input with baseURL. + // 2. If parsedURL is failure, then throw a TypeError. 
+ let parsedURL + try { + parsedURL = new URL(input, baseUrl) + } catch (err) { + throw new TypeError('Failed to parse URL from ' + input, { cause: err }) + } + + // 3. If parsedURL includes credentials, then throw a TypeError. + if (parsedURL.username || parsedURL.password) { + throw new TypeError( + 'Request cannot be constructed from a URL that includes credentials: ' + + input + ) + } + + // 4. Set request to a new request whose URL is parsedURL. + request = makeRequest({ urlList: [parsedURL] }) + + // 5. Set fallbackMode to "cors". + fallbackMode = 'cors' + } else { + // 6. Otherwise: + + // 7. Assert: input is a Request object. + assert(input instanceof Request) + + // 8. Set request to input’s request. + request = input[kState] + + // 9. Set signal to input’s signal. + signal = input[kSignal] + } + + // 7. Let origin be this’s relevant settings object’s origin. + const origin = this[kRealm].settingsObject.origin + + // 8. Let window be "client". + let window = 'client' + + // 9. If request’s window is an environment settings object and its origin + // is same origin with origin, then set window to request’s window. + if ( + request.window?.constructor?.name === 'EnvironmentSettingsObject' && + sameOrigin(request.window, origin) + ) { + window = request.window + } + + // 10. If init["window"] exists and is non-null, then throw a TypeError. + if (init.window != null) { + throw new TypeError(`'window' option '${window}' must be null`) + } + + // 11. If init["window"] exists, then set window to "no-window". + if ('window' in init) { + window = 'no-window' + } + + // 12. Set request to a new request with the following properties: + request = makeRequest({ + // URL request’s URL. + // undici implementation note: this is set as the first item in request's urlList in makeRequest + // method request’s method. + method: request.method, + // header list A copy of request’s header list. + // undici implementation note: headersList is cloned in makeRequest + headersList: request.headersList, + // unsafe-request flag Set. + unsafeRequest: request.unsafeRequest, + // client This’s relevant settings object. + client: this[kRealm].settingsObject, + // window window. + window, + // priority request’s priority. + priority: request.priority, + // origin request’s origin. The propagation of the origin is only significant for navigation requests + // being handled by a service worker. In this scenario a request can have an origin that is different + // from the current client. + origin: request.origin, + // referrer request’s referrer. + referrer: request.referrer, + // referrer policy request’s referrer policy. + referrerPolicy: request.referrerPolicy, + // mode request’s mode. + mode: request.mode, + // credentials mode request’s credentials mode. + credentials: request.credentials, + // cache mode request’s cache mode. + cache: request.cache, + // redirect mode request’s redirect mode. + redirect: request.redirect, + // integrity metadata request’s integrity metadata. + integrity: request.integrity, + // keepalive request’s keepalive. + keepalive: request.keepalive, + // reload-navigation flag request’s reload-navigation flag. + reloadNavigation: request.reloadNavigation, + // history-navigation flag request’s history-navigation flag. + historyNavigation: request.historyNavigation, + // URL list A clone of request’s URL list. + urlList: [...request.urlList] + }) + + const initHasKey = Object.keys(init).length !== 0 + + // 13. If init is not empty, then: + if (initHasKey) { + // 1. 
If request’s mode is "navigate", then set it to "same-origin". + if (request.mode === 'navigate') { + request.mode = 'same-origin' + } + + // 2. Unset request’s reload-navigation flag. + request.reloadNavigation = false + + // 3. Unset request’s history-navigation flag. + request.historyNavigation = false + + // 4. Set request’s origin to "client". + request.origin = 'client' + + // 5. Set request’s referrer to "client" + request.referrer = 'client' + + // 6. Set request’s referrer policy to the empty string. + request.referrerPolicy = '' + + // 7. Set request’s URL to request’s current URL. + request.url = request.urlList[request.urlList.length - 1] + + // 8. Set request’s URL list to « request’s URL ». + request.urlList = [request.url] + } + + // 14. If init["referrer"] exists, then: + if (init.referrer !== undefined) { + // 1. Let referrer be init["referrer"]. + const referrer = init.referrer + + // 2. If referrer is the empty string, then set request’s referrer to "no-referrer". + if (referrer === '') { + request.referrer = 'no-referrer' + } else { + // 1. Let parsedReferrer be the result of parsing referrer with + // baseURL. + // 2. If parsedReferrer is failure, then throw a TypeError. + let parsedReferrer + try { + parsedReferrer = new URL(referrer, baseUrl) + } catch (err) { + throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }) + } + + // 3. If one of the following is true + // - parsedReferrer’s scheme is "about" and path is the string "client" + // - parsedReferrer’s origin is not same origin with origin + // then set request’s referrer to "client". + if ( + (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') || + (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) + ) { + request.referrer = 'client' + } else { + // 4. Otherwise, set request’s referrer to parsedReferrer. + request.referrer = parsedReferrer + } + } + } + + // 15. If init["referrerPolicy"] exists, then set request’s referrer policy + // to it. + if (init.referrerPolicy !== undefined) { + request.referrerPolicy = init.referrerPolicy + } + + // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise. + let mode + if (init.mode !== undefined) { + mode = init.mode + } else { + mode = fallbackMode + } + + // 17. If mode is "navigate", then throw a TypeError. + if (mode === 'navigate') { + throw webidl.errors.exception({ + header: 'Request constructor', + message: 'invalid request mode navigate.' + }) + } + + // 18. If mode is non-null, set request’s mode to mode. + if (mode != null) { + request.mode = mode + } + + // 19. If init["credentials"] exists, then set request’s credentials mode + // to it. + if (init.credentials !== undefined) { + request.credentials = init.credentials + } + + // 18. If init["cache"] exists, then set request’s cache mode to it. + if (init.cache !== undefined) { + request.cache = init.cache + } + + // 21. If request’s cache mode is "only-if-cached" and request’s mode is + // not "same-origin", then throw a TypeError. + if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') { + throw new TypeError( + "'only-if-cached' can be set only with 'same-origin' mode" + ) + } + + // 22. If init["redirect"] exists, then set request’s redirect mode to it. + if (init.redirect !== undefined) { + request.redirect = init.redirect + } + + // 23. If init["integrity"] exists, then set request’s integrity metadata to it. 
+ if (init.integrity != null) { + request.integrity = String(init.integrity) + } + + // 24. If init["keepalive"] exists, then set request’s keepalive to it. + if (init.keepalive !== undefined) { + request.keepalive = Boolean(init.keepalive) + } + + // 25. If init["method"] exists, then: + if (init.method !== undefined) { + // 1. Let method be init["method"]. + let method = init.method + + // 2. If method is not a method or method is a forbidden method, then + // throw a TypeError. + if (!isValidHTTPToken(method)) { + throw new TypeError(`'${method}' is not a valid HTTP method.`) + } + + if (forbiddenMethodsSet.has(method.toUpperCase())) { + throw new TypeError(`'${method}' HTTP method is unsupported.`) + } + + // 3. Normalize method. + method = normalizeMethodRecord[method] ?? normalizeMethod(method) + + // 4. Set request’s method to method. + request.method = method + } + + // 26. If init["signal"] exists, then set signal to it. + if (init.signal !== undefined) { + signal = init.signal + } + + // 27. Set this’s request to request. + this[kState] = request + + // 28. Set this’s signal to a new AbortSignal object with this’s relevant + // Realm. + // TODO: could this be simplified with AbortSignal.any + // (https://dom.spec.whatwg.org/#dom-abortsignal-any) + const ac = new AbortController() + this[kSignal] = ac.signal + this[kSignal][kRealm] = this[kRealm] + + // 29. If signal is not null, then make this’s signal follow signal. + if (signal != null) { + if ( + !signal || + typeof signal.aborted !== 'boolean' || + typeof signal.addEventListener !== 'function' + ) { + throw new TypeError( + "Failed to construct 'Request': member signal is not of type AbortSignal." + ) + } + + if (signal.aborted) { + ac.abort(signal.reason) + } else { + // Keep a strong ref to ac while request object + // is alive. This is needed to prevent AbortController + // from being prematurely garbage collected. + // See, https://github.com/nodejs/undici/issues/1926. + this[kAbortController] = ac + + const acRef = new WeakRef(ac) + const abort = function () { + const ac = acRef.deref() + if (ac !== undefined) { + ac.abort(this.reason) + } + } + + // Third-party AbortControllers may not work with these. + // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619. + try { + // If the max amount of listeners is equal to the default, increase it + // This is only available in node >= v19.9.0 + if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) { + setMaxListeners(100, signal) + } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) { + setMaxListeners(100, signal) + } + } catch {} + + util.addAbortListener(signal, abort) + requestFinalizer.register(ac, { signal, abort }) + } + } + + // 30. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is request’s header list and guard is + // "request". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kHeadersList] = request.headersList + this[kHeaders][kGuard] = 'request' + this[kHeaders][kRealm] = this[kRealm] + + // 31. If this’s request’s mode is "no-cors", then: + if (mode === 'no-cors') { + // 1. If this’s request’s method is not a CORS-safelisted method, + // then throw a TypeError. + if (!corsSafeListedMethodsSet.has(request.method)) { + throw new TypeError( + `'${request.method} is unsupported in no-cors mode.` + ) + } + + // 2. Set this’s headers’s guard to "request-no-cors". + this[kHeaders][kGuard] = 'request-no-cors' + } + + // 32. 
If init is not empty, then: + if (initHasKey) { + /** @type {HeadersList} */ + const headersList = this[kHeaders][kHeadersList] + // 1. Let headers be a copy of this’s headers and its associated header + // list. + // 2. If init["headers"] exists, then set headers to init["headers"]. + const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList) + + // 3. Empty this’s headers’s header list. + headersList.clear() + + // 4. If headers is a Headers object, then for each header in its header + // list, append header’s name/header’s value to this’s headers. + if (headers instanceof HeadersList) { + for (const [key, val] of headers) { + headersList.append(key, val) + } + // Note: Copy the `set-cookie` meta-data. + headersList.cookies = headers.cookies + } else { + // 5. Otherwise, fill this’s headers with headers. + fillHeaders(this[kHeaders], headers) + } + } + + // 33. Let inputBody be input’s request’s body if input is a Request + // object; otherwise null. + const inputBody = input instanceof Request ? input[kState].body : null + + // 34. If either init["body"] exists and is non-null or inputBody is + // non-null, and request’s method is `GET` or `HEAD`, then throw a + // TypeError. + if ( + (init.body != null || inputBody != null) && + (request.method === 'GET' || request.method === 'HEAD') + ) { + throw new TypeError('Request with GET/HEAD method cannot have body.') + } + + // 35. Let initBody be null. + let initBody = null + + // 36. If init["body"] exists and is non-null, then: + if (init.body != null) { + // 1. Let Content-Type be null. + // 2. Set initBody and Content-Type to the result of extracting + // init["body"], with keepalive set to request’s keepalive. + const [extractedBody, contentType] = extractBody( + init.body, + request.keepalive + ) + initBody = extractedBody + + // 3, If Content-Type is non-null and this’s headers’s header list does + // not contain `Content-Type`, then append `Content-Type`/Content-Type to + // this’s headers. + if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) { + this[kHeaders].append('content-type', contentType) + } + } + + // 37. Let inputOrInitBody be initBody if it is non-null; otherwise + // inputBody. + const inputOrInitBody = initBody ?? inputBody + + // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is + // null, then: + if (inputOrInitBody != null && inputOrInitBody.source == null) { + // 1. If initBody is non-null and init["duplex"] does not exist, + // then throw a TypeError. + if (initBody != null && init.duplex == null) { + throw new TypeError('RequestInit: duplex option is required when sending a body.') + } + + // 2. If this’s request’s mode is neither "same-origin" nor "cors", + // then throw a TypeError. + if (request.mode !== 'same-origin' && request.mode !== 'cors') { + throw new TypeError( + 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' + ) + } + + // 3. Set this’s request’s use-CORS-preflight flag. + request.useCORSPreflightFlag = true + } + + // 39. Let finalBody be inputOrInitBody. + let finalBody = inputOrInitBody + + // 40. If initBody is null and inputBody is non-null, then: + if (initBody == null && inputBody != null) { + // 1. If input is unusable, then throw a TypeError. + if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) { + throw new TypeError( + 'Cannot construct a Request with a Request object that has already been used.' + ) + } + + // 2. 
Set finalBody to the result of creating a proxy for inputBody. + if (!TransformStream) { + TransformStream = (__nccwpck_require__(3774).TransformStream) + } + + // https://streams.spec.whatwg.org/#readablestream-create-a-proxy + const identityTransform = new TransformStream() + inputBody.stream.pipeThrough(identityTransform) + finalBody = { + source: inputBody.source, + length: inputBody.length, + stream: identityTransform.readable + } + } + + // 41. Set this’s request’s body to finalBody. + this[kState].body = finalBody + } + + // Returns request’s HTTP method, which is "GET" by default. + get method () { + webidl.brandCheck(this, Request) + + // The method getter steps are to return this’s request’s method. + return this[kState].method + } + + // Returns the URL of request as a string. + get url () { + webidl.brandCheck(this, Request) + + // The url getter steps are to return this’s request’s URL, serialized. + return URLSerializer(this[kState].url) + } + + // Returns a Headers object consisting of the headers associated with request. + // Note that headers added in the network layer by the user agent will not + // be accounted for in this object, e.g., the "Host" header. + get headers () { + webidl.brandCheck(this, Request) + + // The headers getter steps are to return this’s headers. + return this[kHeaders] + } + + // Returns the kind of resource requested by request, e.g., "document" + // or "script". + get destination () { + webidl.brandCheck(this, Request) + + // The destination getter are to return this’s request’s destination. + return this[kState].destination + } + + // Returns the referrer of request. Its value can be a same-origin URL if + // explicitly set in init, the empty string to indicate no referrer, and + // "about:client" when defaulting to the global’s default. This is used + // during fetching to determine the value of the `Referer` header of the + // request being made. + get referrer () { + webidl.brandCheck(this, Request) + + // 1. If this’s request’s referrer is "no-referrer", then return the + // empty string. + if (this[kState].referrer === 'no-referrer') { + return '' + } + + // 2. If this’s request’s referrer is "client", then return + // "about:client". + if (this[kState].referrer === 'client') { + return 'about:client' + } + + // Return this’s request’s referrer, serialized. + return this[kState].referrer.toString() + } + + // Returns the referrer policy associated with request. + // This is used during fetching to compute the value of the request’s + // referrer. + get referrerPolicy () { + webidl.brandCheck(this, Request) + + // The referrerPolicy getter steps are to return this’s request’s referrer policy. + return this[kState].referrerPolicy + } + + // Returns the mode associated with request, which is a string indicating + // whether the request will use CORS, or will be restricted to same-origin + // URLs. + get mode () { + webidl.brandCheck(this, Request) + + // The mode getter steps are to return this’s request’s mode. + return this[kState].mode + } + + // Returns the credentials mode associated with request, + // which is a string indicating whether credentials will be sent with the + // request always, never, or only when sent to a same-origin URL. + get credentials () { + // The credentials getter steps are to return this’s request’s credentials mode. 
+ return this[kState].credentials + } + + // Returns the cache mode associated with request, + // which is a string indicating how the request will + // interact with the browser’s cache when fetching. + get cache () { + webidl.brandCheck(this, Request) + + // The cache getter steps are to return this’s request’s cache mode. + return this[kState].cache + } + + // Returns the redirect mode associated with request, + // which is a string indicating how redirects for the + // request will be handled during fetching. A request + // will follow redirects by default. + get redirect () { + webidl.brandCheck(this, Request) + + // The redirect getter steps are to return this’s request’s redirect mode. + return this[kState].redirect + } + + // Returns request’s subresource integrity metadata, which is a + // cryptographic hash of the resource being fetched. Its value + // consists of multiple hashes separated by whitespace. [SRI] + get integrity () { + webidl.brandCheck(this, Request) + + // The integrity getter steps are to return this’s request’s integrity + // metadata. + return this[kState].integrity + } + + // Returns a boolean indicating whether or not request can outlive the + // global in which it was created. + get keepalive () { + webidl.brandCheck(this, Request) + + // The keepalive getter steps are to return this’s request’s keepalive. + return this[kState].keepalive + } + + // Returns a boolean indicating whether or not request is for a reload + // navigation. + get isReloadNavigation () { + webidl.brandCheck(this, Request) + + // The isReloadNavigation getter steps are to return true if this’s + // request’s reload-navigation flag is set; otherwise false. + return this[kState].reloadNavigation + } + + // Returns a boolean indicating whether or not request is for a history + // navigation (a.k.a. back-foward navigation). + get isHistoryNavigation () { + webidl.brandCheck(this, Request) + + // The isHistoryNavigation getter steps are to return true if this’s request’s + // history-navigation flag is set; otherwise false. + return this[kState].historyNavigation + } + + // Returns the signal associated with request, which is an AbortSignal + // object indicating whether or not request has been aborted, and its + // abort event handler. + get signal () { + webidl.brandCheck(this, Request) + + // The signal getter steps are to return this’s signal. + return this[kSignal] + } + + get body () { + webidl.brandCheck(this, Request) + + return this[kState].body ? this[kState].body.stream : null + } + + get bodyUsed () { + webidl.brandCheck(this, Request) + + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) + } + + get duplex () { + webidl.brandCheck(this, Request) + + return 'half' + } + + // Returns a clone of request. + clone () { + webidl.brandCheck(this, Request) + + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || this.body?.locked) { + throw new TypeError('unusable') + } + + // 2. Let clonedRequest be the result of cloning this’s request. + const clonedRequest = cloneRequest(this[kState]) + + // 3. Let clonedRequestObject be the result of creating a Request object, + // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. 
+ const clonedRequestObject = new Request(kConstruct) + clonedRequestObject[kState] = clonedRequest + clonedRequestObject[kRealm] = this[kRealm] + clonedRequestObject[kHeaders] = new Headers(kConstruct) + clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList + clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] + + // 4. Make clonedRequestObject’s signal follow this’s signal. + const ac = new AbortController() + if (this.signal.aborted) { + ac.abort(this.signal.reason) + } else { + util.addAbortListener( + this.signal, + () => { + ac.abort(this.signal.reason) + } + ) + } + clonedRequestObject[kSignal] = ac.signal + + // 4. Return clonedRequestObject. + return clonedRequestObject + } +} + +mixinBody(Request) + +function makeRequest (init) { + // https://fetch.spec.whatwg.org/#requests + const request = { + method: 'GET', + localURLsOnly: false, + unsafeRequest: false, + body: null, + client: null, + reservedClient: null, + replacesClientId: '', + window: 'client', + keepalive: false, + serviceWorkers: 'all', + initiator: '', + destination: '', + priority: null, + origin: 'client', + policyContainer: 'client', + referrer: 'client', + referrerPolicy: '', + mode: 'no-cors', + useCORSPreflightFlag: false, + credentials: 'same-origin', + useCredentials: false, + cache: 'default', + redirect: 'follow', + integrity: '', + cryptoGraphicsNonceMetadata: '', + parserMetadata: '', + reloadNavigation: false, + historyNavigation: false, + userActivation: false, + taintedOrigin: false, + redirectCount: 0, + responseTainting: 'basic', + preventNoCacheCacheControlHeaderModification: false, + done: false, + timingAllowFailed: false, + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList() + } + request.url = request.urlList[0] + return request +} + +// https://fetch.spec.whatwg.org/#concept-request-clone +function cloneRequest (request) { + // To clone a request request, run these steps: + + // 1. Let newRequest be a copy of request, except for its body. + const newRequest = makeRequest({ ...request, body: null }) + + // 2. If request’s body is non-null, set newRequest’s body to the + // result of cloning request’s body. + if (request.body != null) { + newRequest.body = cloneBody(request.body) + } + + // 3. Return newRequest. 
+ return newRequest +} + +Object.defineProperties(Request.prototype, { + method: kEnumerableProperty, + url: kEnumerableProperty, + headers: kEnumerableProperty, + redirect: kEnumerableProperty, + clone: kEnumerableProperty, + signal: kEnumerableProperty, + duplex: kEnumerableProperty, + destination: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + isHistoryNavigation: kEnumerableProperty, + isReloadNavigation: kEnumerableProperty, + keepalive: kEnumerableProperty, + integrity: kEnumerableProperty, + cache: kEnumerableProperty, + credentials: kEnumerableProperty, + attribute: kEnumerableProperty, + referrerPolicy: kEnumerableProperty, + referrer: kEnumerableProperty, + mode: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Request', + configurable: true + } +}) + +webidl.converters.Request = webidl.interfaceConverter( + Request +) + +// https://fetch.spec.whatwg.org/#requestinfo +webidl.converters.RequestInfo = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) + } + + if (V instanceof Request) { + return webidl.converters.Request(V) + } + + return webidl.converters.USVString(V) +} + +webidl.converters.AbortSignal = webidl.interfaceConverter( + AbortSignal +) + +// https://fetch.spec.whatwg.org/#requestinit +webidl.converters.RequestInit = webidl.dictionaryConverter([ + { + key: 'method', + converter: webidl.converters.ByteString + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + }, + { + key: 'body', + converter: webidl.nullableConverter( + webidl.converters.BodyInit + ) + }, + { + key: 'referrer', + converter: webidl.converters.USVString + }, + { + key: 'referrerPolicy', + converter: webidl.converters.DOMString, + // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy + allowedValues: referrerPolicy + }, + { + key: 'mode', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#concept-request-mode + allowedValues: requestMode + }, + { + key: 'credentials', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcredentials + allowedValues: requestCredentials + }, + { + key: 'cache', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestcache + allowedValues: requestCache + }, + { + key: 'redirect', + converter: webidl.converters.DOMString, + // https://fetch.spec.whatwg.org/#requestredirect + allowedValues: requestRedirect + }, + { + key: 'integrity', + converter: webidl.converters.DOMString + }, + { + key: 'keepalive', + converter: webidl.converters.boolean + }, + { + key: 'signal', + converter: webidl.nullableConverter( + (signal) => webidl.converters.AbortSignal( + signal, + { strict: false } + ) + ) + }, + { + key: 'window', + converter: webidl.converters.any + }, + { + key: 'duplex', + converter: webidl.converters.DOMString, + allowedValues: requestDuplex + } +]) + +module.exports = { Request, makeRequest } + + +/***/ }), + +/***/ 8676: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { Headers, HeadersList, fill } = __nccwpck_require__(6349) +const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(8923) +const util = __nccwpck_require__(3440) +const { kEnumerableProperty } = util +const { + isValidReasonPhrase, + isCancelled, + isAborted, + isBlobLike, + serializeJavascriptValueToJSONString, + isErrorLike, + isomorphicEncode +} = __nccwpck_require__(5523) +const { + redirectStatusSet, + nullBodyStatus, + DOMException +} = 
__nccwpck_require__(7326) +const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(9710) +const { webidl } = __nccwpck_require__(4222) +const { FormData } = __nccwpck_require__(3073) +const { getGlobalOrigin } = __nccwpck_require__(5628) +const { URLSerializer } = __nccwpck_require__(4322) +const { kHeadersList, kConstruct } = __nccwpck_require__(6443) +const assert = __nccwpck_require__(2613) +const { types } = __nccwpck_require__(9023) + +const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(3774).ReadableStream) +const textEncoder = new TextEncoder('utf-8') + +// https://fetch.spec.whatwg.org/#response-class +class Response { + // Creates network error Response. + static error () { + // TODO + const relevantRealm = { settingsObject: {} } + + // The static error() method steps are to return the result of creating a + // Response object, given a new network error, "immutable", and this’s + // relevant Realm. + const responseObject = new Response() + responseObject[kState] = makeNetworkError() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + return responseObject + } + + // https://fetch.spec.whatwg.org/#dom-response-json + static json (data, init = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' }) + + if (init !== null) { + init = webidl.converters.ResponseInit(init) + } + + // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data. + const bytes = textEncoder.encode( + serializeJavascriptValueToJSONString(data) + ) + + // 2. Let body be the result of extracting bytes. + const body = extractBody(bytes) + + // 3. Let responseObject be the result of creating a Response object, given a new response, + // "response", and this’s relevant Realm. + const relevantRealm = { settingsObject: {} } + const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'response' + responseObject[kHeaders][kRealm] = relevantRealm + + // 4. Perform initialize a response given responseObject, init, and (body, "application/json"). + initializeResponse(responseObject, init, { body: body[0], type: 'application/json' }) + + // 5. Return responseObject. + return responseObject + } + + // Creates a redirect Response that redirects to url with status status. + static redirect (url, status = 302) { + const relevantRealm = { settingsObject: {} } + + webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' }) + + url = webidl.converters.USVString(url) + status = webidl.converters['unsigned short'](status) + + // 1. Let parsedURL be the result of parsing url with current settings + // object’s API base URL. + // 2. If parsedURL is failure, then throw a TypeError. + // TODO: base-URL? + let parsedURL + try { + parsedURL = new URL(url, getGlobalOrigin()) + } catch (err) { + throw Object.assign(new TypeError('Failed to parse URL from ' + url), { + cause: err + }) + } + + // 3. If status is not a redirect status, then throw a RangeError. + if (!redirectStatusSet.has(status)) { + throw new RangeError('Invalid status code ' + status) + } + + // 4. Let responseObject be the result of creating a Response object, + // given a new response, "immutable", and this’s relevant Realm. 
+ const responseObject = new Response() + responseObject[kRealm] = relevantRealm + responseObject[kHeaders][kGuard] = 'immutable' + responseObject[kHeaders][kRealm] = relevantRealm + + // 5. Set responseObject’s response’s status to status. + responseObject[kState].status = status + + // 6. Let value be parsedURL, serialized and isomorphic encoded. + const value = isomorphicEncode(URLSerializer(parsedURL)) + + // 7. Append `Location`/value to responseObject’s response’s header list. + responseObject[kState].headersList.append('location', value) + + // 8. Return responseObject. + return responseObject + } + + // https://fetch.spec.whatwg.org/#dom-response + constructor (body = null, init = {}) { + if (body !== null) { + body = webidl.converters.BodyInit(body) + } + + init = webidl.converters.ResponseInit(init) + + // TODO + this[kRealm] = { settingsObject: {} } + + // 1. Set this’s response to a new response. + this[kState] = makeResponse({}) + + // 2. Set this’s headers to a new Headers object with this’s relevant + // Realm, whose header list is this’s response’s header list and guard + // is "response". + this[kHeaders] = new Headers(kConstruct) + this[kHeaders][kGuard] = 'response' + this[kHeaders][kHeadersList] = this[kState].headersList + this[kHeaders][kRealm] = this[kRealm] + + // 3. Let bodyWithType be null. + let bodyWithType = null + + // 4. If body is non-null, then set bodyWithType to the result of extracting body. + if (body != null) { + const [extractedBody, type] = extractBody(body) + bodyWithType = { body: extractedBody, type } + } + + // 5. Perform initialize a response given this, init, and bodyWithType. + initializeResponse(this, init, bodyWithType) + } + + // Returns response’s type, e.g., "cors". + get type () { + webidl.brandCheck(this, Response) + + // The type getter steps are to return this’s response’s type. + return this[kState].type + } + + // Returns response’s URL, if it has one; otherwise the empty string. + get url () { + webidl.brandCheck(this, Response) + + const urlList = this[kState].urlList + + // The url getter steps are to return the empty string if this’s + // response’s URL is null; otherwise this’s response’s URL, + // serialized with exclude fragment set to true. + const url = urlList[urlList.length - 1] ?? null + + if (url === null) { + return '' + } + + return URLSerializer(url, true) + } + + // Returns whether response was obtained through a redirect. + get redirected () { + webidl.brandCheck(this, Response) + + // The redirected getter steps are to return true if this’s response’s URL + // list has more than one item; otherwise false. + return this[kState].urlList.length > 1 + } + + // Returns response’s status. + get status () { + webidl.brandCheck(this, Response) + + // The status getter steps are to return this’s response’s status. + return this[kState].status + } + + // Returns whether response’s status is an ok status. + get ok () { + webidl.brandCheck(this, Response) + + // The ok getter steps are to return true if this’s response’s status is an + // ok status; otherwise false. + return this[kState].status >= 200 && this[kState].status <= 299 + } + + // Returns response’s status message. + get statusText () { + webidl.brandCheck(this, Response) + + // The statusText getter steps are to return this’s response’s status + // message. + return this[kState].statusText + } + + // Returns response’s headers as Headers. + get headers () { + webidl.brandCheck(this, Response) + + // The headers getter steps are to return this’s headers. 
+ return this[kHeaders] + } + + get body () { + webidl.brandCheck(this, Response) + + return this[kState].body ? this[kState].body.stream : null + } + + get bodyUsed () { + webidl.brandCheck(this, Response) + + return !!this[kState].body && util.isDisturbed(this[kState].body.stream) + } + + // Returns a clone of response. + clone () { + webidl.brandCheck(this, Response) + + // 1. If this is unusable, then throw a TypeError. + if (this.bodyUsed || (this.body && this.body.locked)) { + throw webidl.errors.exception({ + header: 'Response.clone', + message: 'Body has already been consumed.' + }) + } + + // 2. Let clonedResponse be the result of cloning this’s response. + const clonedResponse = cloneResponse(this[kState]) + + // 3. Return the result of creating a Response object, given + // clonedResponse, this’s headers’s guard, and this’s relevant Realm. + const clonedResponseObject = new Response() + clonedResponseObject[kState] = clonedResponse + clonedResponseObject[kRealm] = this[kRealm] + clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList + clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard] + clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm] + + return clonedResponseObject + } +} + +mixinBody(Response) + +Object.defineProperties(Response.prototype, { + type: kEnumerableProperty, + url: kEnumerableProperty, + status: kEnumerableProperty, + ok: kEnumerableProperty, + redirected: kEnumerableProperty, + statusText: kEnumerableProperty, + headers: kEnumerableProperty, + clone: kEnumerableProperty, + body: kEnumerableProperty, + bodyUsed: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'Response', + configurable: true + } +}) + +Object.defineProperties(Response, { + json: kEnumerableProperty, + redirect: kEnumerableProperty, + error: kEnumerableProperty +}) + +// https://fetch.spec.whatwg.org/#concept-response-clone +function cloneResponse (response) { + // To clone a response response, run these steps: + + // 1. If response is a filtered response, then return a new identical + // filtered response whose internal response is a clone of response’s + // internal response. + if (response.internalResponse) { + return filterResponse( + cloneResponse(response.internalResponse), + response.type + ) + } + + // 2. Let newResponse be a copy of response, except for its body. + const newResponse = makeResponse({ ...response, body: null }) + + // 3. If response’s body is non-null, then set newResponse’s body to the + // result of cloning response’s body. + if (response.body != null) { + newResponse.body = cloneBody(response.body) + } + + // 4. Return newResponse. + return newResponse +} + +function makeResponse (init) { + return { + aborted: false, + rangeRequested: false, + timingAllowPassed: false, + requestIncludesCredentials: false, + type: 'default', + status: 200, + timingInfo: null, + cacheState: '', + statusText: '', + ...init, + headersList: init.headersList + ? new HeadersList(init.headersList) + : new HeadersList(), + urlList: init.urlList ? [...init.urlList] : [] + } +} + +function makeNetworkError (reason) { + const isError = isErrorLike(reason) + return makeResponse({ + type: 'error', + status: 0, + error: isError + ? reason + : new Error(reason ? String(reason) : reason), + aborted: reason && reason.name === 'AbortError' + }) +} + +function makeFilteredResponse (response, state) { + state = { + internalResponse: response, + ...state + } + + return new Proxy(response, { + get (target, p) { + return p in state ? 
state[p] : target[p] + }, + set (target, p, value) { + assert(!(p in state)) + target[p] = value + return true + } + }) +} + +// https://fetch.spec.whatwg.org/#concept-filtered-response +function filterResponse (response, type) { + // Set response to the following filtered response with response as its + // internal response, depending on request’s response tainting: + if (type === 'basic') { + // A basic filtered response is a filtered response whose type is "basic" + // and header list excludes any headers in internal response’s header list + // whose name is a forbidden response-header name. + + // Note: undici does not implement forbidden response-header names + return makeFilteredResponse(response, { + type: 'basic', + headersList: response.headersList + }) + } else if (type === 'cors') { + // A CORS filtered response is a filtered response whose type is "cors" + // and header list excludes any headers in internal response’s header + // list whose name is not a CORS-safelisted response-header name, given + // internal response’s CORS-exposed header-name list. + + // Note: undici does not implement CORS-safelisted response-header names + return makeFilteredResponse(response, { + type: 'cors', + headersList: response.headersList + }) + } else if (type === 'opaque') { + // An opaque filtered response is a filtered response whose type is + // "opaque", URL list is the empty list, status is 0, status message + // is the empty byte sequence, header list is empty, and body is null. + + return makeFilteredResponse(response, { + type: 'opaque', + urlList: Object.freeze([]), + status: 0, + statusText: '', + body: null + }) + } else if (type === 'opaqueredirect') { + // An opaque-redirect filtered response is a filtered response whose type + // is "opaqueredirect", status is 0, status message is the empty byte + // sequence, header list is empty, and body is null. + + return makeFilteredResponse(response, { + type: 'opaqueredirect', + status: 0, + statusText: '', + headersList: [], + body: null + }) + } else { + assert(false) + } +} + +// https://fetch.spec.whatwg.org/#appropriate-network-error +function makeAppropriateNetworkError (fetchParams, err = null) { + // 1. Assert: fetchParams is canceled. + assert(isCancelled(fetchParams)) + + // 2. Return an aborted network error if fetchParams is aborted; + // otherwise return a network error. + return isAborted(fetchParams) + ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err })) + : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err })) +} + +// https://whatpr.org/fetch/1392.html#initialize-a-response +function initializeResponse (response, init, body) { + // 1. If init["status"] is not in the range 200 to 599, inclusive, then + // throw a RangeError. + if (init.status !== null && (init.status < 200 || init.status > 599)) { + throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.') + } + + // 2. If init["statusText"] does not match the reason-phrase token production, + // then throw a TypeError. + if ('statusText' in init && init.statusText != null) { + // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2: + // reason-phrase = *( HTAB / SP / VCHAR / obs-text ) + if (!isValidReasonPhrase(String(init.statusText))) { + throw new TypeError('Invalid statusText') + } + } + + // 3. Set response’s response’s status to init["status"]. 
+ if ('status' in init && init.status != null) { + response[kState].status = init.status + } + + // 4. Set response’s response’s status message to init["statusText"]. + if ('statusText' in init && init.statusText != null) { + response[kState].statusText = init.statusText + } + + // 5. If init["headers"] exists, then fill response’s headers with init["headers"]. + if ('headers' in init && init.headers != null) { + fill(response[kHeaders], init.headers) + } + + // 6. If body was given, then: + if (body) { + // 1. If response's status is a null body status, then throw a TypeError. + if (nullBodyStatus.includes(response.status)) { + throw webidl.errors.exception({ + header: 'Response constructor', + message: 'Invalid response status code ' + response.status + }) + } + + // 2. Set response's body to body's body. + response[kState].body = body.body + + // 3. If body's type is non-null and response's header list does not contain + // `Content-Type`, then append (`Content-Type`, body's type) to response's header list. + if (body.type != null && !response[kState].headersList.contains('Content-Type')) { + response[kState].headersList.append('content-type', body.type) + } + } +} + +webidl.converters.ReadableStream = webidl.interfaceConverter( + ReadableStream +) + +webidl.converters.FormData = webidl.interfaceConverter( + FormData +) + +webidl.converters.URLSearchParams = webidl.interfaceConverter( + URLSearchParams +) + +// https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit +webidl.converters.XMLHttpRequestBodyInit = function (V) { + if (typeof V === 'string') { + return webidl.converters.USVString(V) + } + + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } + + if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { + return webidl.converters.BufferSource(V) + } + + if (util.isFormDataLike(V)) { + return webidl.converters.FormData(V, { strict: false }) + } + + if (V instanceof URLSearchParams) { + return webidl.converters.URLSearchParams(V) + } + + return webidl.converters.DOMString(V) +} + +// https://fetch.spec.whatwg.org/#bodyinit +webidl.converters.BodyInit = function (V) { + if (V instanceof ReadableStream) { + return webidl.converters.ReadableStream(V) + } + + // Note: the spec doesn't include async iterables, + // this is an undici extension. 
+ if (V?.[Symbol.asyncIterator]) { + return V + } + + return webidl.converters.XMLHttpRequestBodyInit(V) +} + +webidl.converters.ResponseInit = webidl.dictionaryConverter([ + { + key: 'status', + converter: webidl.converters['unsigned short'], + defaultValue: 200 + }, + { + key: 'statusText', + converter: webidl.converters.ByteString, + defaultValue: '' + }, + { + key: 'headers', + converter: webidl.converters.HeadersInit + } +]) + +module.exports = { + makeNetworkError, + makeResponse, + makeAppropriateNetworkError, + filterResponse, + Response, + cloneResponse +} + + +/***/ }), + +/***/ 9710: +/***/ ((module) => { + +"use strict"; + + +module.exports = { + kUrl: Symbol('url'), + kHeaders: Symbol('headers'), + kSignal: Symbol('signal'), + kState: Symbol('state'), + kGuard: Symbol('guard'), + kRealm: Symbol('realm') +} + + +/***/ }), + +/***/ 5523: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = __nccwpck_require__(7326) +const { getGlobalOrigin } = __nccwpck_require__(5628) +const { performance } = __nccwpck_require__(2987) +const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3440) +const assert = __nccwpck_require__(2613) +const { isUint8Array } = __nccwpck_require__(8253) + +let supportedHashes = [] + +// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable +/** @type {import('crypto')|undefined} */ +let crypto + +try { + crypto = __nccwpck_require__(6982) + const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] + supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) +/* c8 ignore next 3 */ +} catch { +} + +function responseURL (response) { + // https://fetch.spec.whatwg.org/#responses + // A response has an associated URL. It is a pointer to the last URL + // in response’s URL list and null if response’s URL list is empty. + const urlList = response.urlList + const length = urlList.length + return length === 0 ? null : urlList[length - 1].toString() +} + +// https://fetch.spec.whatwg.org/#concept-response-location-url +function responseLocationURL (response, requestFragment) { + // 1. If response’s status is not a redirect status, then return null. + if (!redirectStatusSet.has(response.status)) { + return null + } + + // 2. Let location be the result of extracting header list values given + // `Location` and response’s header list. + let location = response.headersList.get('location') + + // 3. If location is a header value, then set location to the result of + // parsing location with response’s URL. + if (location !== null && isValidHeaderValue(location)) { + location = new URL(location, responseURL(response)) + } + + // 4. If location is a URL whose fragment is null, then set location’s + // fragment to requestFragment. + if (location && !location.hash) { + location.hash = requestFragment + } + + // 5. Return location. + return location +} + +/** @returns {URL} */ +function requestCurrentURL (request) { + return request.urlList[request.urlList.length - 1] +} + +function requestBadPort (request) { + // 1. Let url be request’s current URL. + const url = requestCurrentURL(request) + + // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port, + // then return blocked. + if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { + return 'blocked' + } + + // 3. Return allowed. 
+ return 'allowed' +} + +function isErrorLike (object) { + return object instanceof Error || ( + object?.constructor?.name === 'Error' || + object?.constructor?.name === 'DOMException' + ) +} + +// Check whether |statusText| is a ByteString and +// matches the Reason-Phrase token production. +// RFC 2616: https://tools.ietf.org/html/rfc2616 +// RFC 7230: https://tools.ietf.org/html/rfc7230 +// "reason-phrase = *( HTAB / SP / VCHAR / obs-text )" +// https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116 +function isValidReasonPhrase (statusText) { + for (let i = 0; i < statusText.length; ++i) { + const c = statusText.charCodeAt(i) + if ( + !( + ( + c === 0x09 || // HTAB + (c >= 0x20 && c <= 0x7e) || // SP / VCHAR + (c >= 0x80 && c <= 0xff) + ) // obs-text + ) + ) { + return false + } + } + return true +} + +/** + * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 + * @param {number} c + */ +function isTokenCharCode (c) { + switch (c) { + case 0x22: + case 0x28: + case 0x29: + case 0x2c: + case 0x2f: + case 0x3a: + case 0x3b: + case 0x3c: + case 0x3d: + case 0x3e: + case 0x3f: + case 0x40: + case 0x5b: + case 0x5c: + case 0x5d: + case 0x7b: + case 0x7d: + // DQUOTE and "(),/:;<=>?@[\]{}" + return false + default: + // VCHAR %x21-7E + return c >= 0x21 && c <= 0x7e + } +} + +/** + * @param {string} characters + */ +function isValidHTTPToken (characters) { + if (characters.length === 0) { + return false + } + for (let i = 0; i < characters.length; ++i) { + if (!isTokenCharCode(characters.charCodeAt(i))) { + return false + } + } + return true +} + +/** + * @see https://fetch.spec.whatwg.org/#header-name + * @param {string} potentialValue + */ +function isValidHeaderName (potentialValue) { + return isValidHTTPToken(potentialValue) +} + +/** + * @see https://fetch.spec.whatwg.org/#header-value + * @param {string} potentialValue + */ +function isValidHeaderValue (potentialValue) { + // - Has no leading or trailing HTTP tab or space bytes. + // - Contains no 0x00 (NUL) or HTTP newline bytes. + if ( + potentialValue.startsWith('\t') || + potentialValue.startsWith(' ') || + potentialValue.endsWith('\t') || + potentialValue.endsWith(' ') + ) { + return false + } + + if ( + potentialValue.includes('\0') || + potentialValue.includes('\r') || + potentialValue.includes('\n') + ) { + return false + } + + return true +} + +// https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect +function setRequestReferrerPolicyOnRedirect (request, actualResponse) { + // Given a request request and a response actualResponse, this algorithm + // updates request’s referrer policy according to the Referrer-Policy + // header (if any) in actualResponse. + + // 1. Let policy be the result of executing § 8.1 Parse a referrer policy + // from a Referrer-Policy header on actualResponse. + + // 8.1 Parse a referrer policy from a Referrer-Policy header + // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list. + const { headersList } = actualResponse + // 2. Let policy be the empty string. + // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token. + // 4. Return policy. + const policyHeader = (headersList.get('referrer-policy') ?? 
'').split(',') + + // Note: As the referrer-policy can contain multiple policies + // separated by comma, we need to loop through all of them + // and pick the first valid one. + // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy + let policy = '' + if (policyHeader.length > 0) { + // The right-most policy takes precedence. + // The left-most policy is the fallback. + for (let i = policyHeader.length; i !== 0; i--) { + const token = policyHeader[i - 1].trim() + if (referrerPolicyTokens.has(token)) { + policy = token + break + } + } + } + + // 2. If policy is not the empty string, then set request’s referrer policy to policy. + if (policy !== '') { + request.referrerPolicy = policy + } +} + +// https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check +function crossOriginResourcePolicyCheck () { + // TODO + return 'allowed' +} + +// https://fetch.spec.whatwg.org/#concept-cors-check +function corsCheck () { + // TODO + return 'success' +} + +// https://fetch.spec.whatwg.org/#concept-tao-check +function TAOCheck () { + // TODO + return 'success' +} + +function appendFetchMetadata (httpRequest) { + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header + + // 1. Assert: r’s url is a potentially trustworthy URL. + // TODO + + // 2. Let header be a Structured Header whose value is a token. + let header = null + + // 3. Set header’s value to r’s mode. + header = httpRequest.mode + + // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list. + httpRequest.headersList.set('sec-fetch-mode', header) + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header + // TODO + + // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header + // TODO +} + +// https://fetch.spec.whatwg.org/#append-a-request-origin-header +function appendRequestOriginHeader (request) { + // 1. Let serializedOrigin be the result of byte-serializing a request origin with request. + let serializedOrigin = request.origin + + // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list. + if (request.responseTainting === 'cors' || request.mode === 'websocket') { + if (serializedOrigin) { + request.headersList.append('origin', serializedOrigin) + } + + // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then: + } else if (request.method !== 'GET' && request.method !== 'HEAD') { + // 1. Switch on request’s referrer policy: + switch (request.referrerPolicy) { + case 'no-referrer': + // Set serializedOrigin to `null`. + serializedOrigin = null + break + case 'no-referrer-when-downgrade': + case 'strict-origin': + case 'strict-origin-when-cross-origin': + // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`. + if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) { + serializedOrigin = null + } + break + case 'same-origin': + // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`. + if (!sameOrigin(request, requestCurrentURL(request))) { + serializedOrigin = null + } + break + default: + // Do nothing. + } + + if (serializedOrigin) { + // 2. Append (`Origin`, serializedOrigin) to request’s header list. 
+ request.headersList.append('origin', serializedOrigin) + } + } +} + +function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) { + // TODO + return performance.now() +} + +// https://fetch.spec.whatwg.org/#create-an-opaque-timing-info +function createOpaqueTimingInfo (timingInfo) { + return { + startTime: timingInfo.startTime ?? 0, + redirectStartTime: 0, + redirectEndTime: 0, + postRedirectStartTime: timingInfo.startTime ?? 0, + finalServiceWorkerStartTime: 0, + finalNetworkResponseStartTime: 0, + finalNetworkRequestStartTime: 0, + endTime: 0, + encodedBodySize: 0, + decodedBodySize: 0, + finalConnectionTimingInfo: null + } +} + +// https://html.spec.whatwg.org/multipage/origin.html#policy-container +function makePolicyContainer () { + // Note: the fetch spec doesn't make use of embedder policy or CSP list + return { + referrerPolicy: 'strict-origin-when-cross-origin' + } +} + +// https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container +function clonePolicyContainer (policyContainer) { + return { + referrerPolicy: policyContainer.referrerPolicy + } +} + +// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer +function determineRequestsReferrer (request) { + // 1. Let policy be request's referrer policy. + const policy = request.referrerPolicy + + // Note: policy cannot (shouldn't) be null or an empty string. + assert(policy) + + // 2. Let environment be request’s client. + + let referrerSource = null + + // 3. Switch on request’s referrer: + if (request.referrer === 'client') { + // Note: node isn't a browser and doesn't implement document/iframes, + // so we bypass this step and replace it with our own. + + const globalOrigin = getGlobalOrigin() + + if (!globalOrigin || globalOrigin.origin === 'null') { + return 'no-referrer' + } + + // note: we need to clone it as it's mutated + referrerSource = new URL(globalOrigin) + } else if (request.referrer instanceof URL) { + // Let referrerSource be request’s referrer. + referrerSource = request.referrer + } + + // 4. Let request’s referrerURL be the result of stripping referrerSource for + // use as a referrer. + let referrerURL = stripURLForReferrer(referrerSource) + + // 5. Let referrerOrigin be the result of stripping referrerSource for use as + // a referrer, with the origin-only flag set to true. + const referrerOrigin = stripURLForReferrer(referrerSource, true) + + // 6. If the result of serializing referrerURL is a string whose length is + // greater than 4096, set referrerURL to referrerOrigin. + if (referrerURL.toString().length > 4096) { + referrerURL = referrerOrigin + } + + const areSameOrigin = sameOrigin(request, referrerURL) + const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && + !isURLPotentiallyTrustworthy(request.url) + + // 8. Execute the switch statements corresponding to the value of policy: + switch (policy) { + case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true) + case 'unsafe-url': return referrerURL + case 'same-origin': + return areSameOrigin ? referrerOrigin : 'no-referrer' + case 'origin-when-cross-origin': + return areSameOrigin ? referrerURL : referrerOrigin + case 'strict-origin-when-cross-origin': { + const currentURL = requestCurrentURL(request) + + // 1. If the origin of referrerURL and the origin of request’s current + // URL are the same, then return referrerURL. + if (sameOrigin(referrerURL, currentURL)) { + return referrerURL + } + + // 2. 
If referrerURL is a potentially trustworthy URL and request’s + // current URL is not a potentially trustworthy URL, then return no + // referrer. + if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) { + return 'no-referrer' + } + + // 3. Return referrerOrigin. + return referrerOrigin + } + case 'strict-origin': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + case 'no-referrer-when-downgrade': // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ + + default: // eslint-disable-line + return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin + } +} + +/** + * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url + * @param {URL} url + * @param {boolean|undefined} originOnly + */ +function stripURLForReferrer (url, originOnly) { + // 1. Assert: url is a URL. + assert(url instanceof URL) + + // 2. If url’s scheme is a local scheme, then return no referrer. + if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') { + return 'no-referrer' + } + + // 3. Set url’s username to the empty string. + url.username = '' + + // 4. Set url’s password to the empty string. + url.password = '' + + // 5. Set url’s fragment to null. + url.hash = '' + + // 6. If the origin-only flag is true, then: + if (originOnly) { + // 1. Set url’s path to « the empty string ». + url.pathname = '' + + // 2. Set url’s query to null. + url.search = '' + } + + // 7. Return url. + return url +} + +function isURLPotentiallyTrustworthy (url) { + if (!(url instanceof URL)) { + return false + } + + // If child of about, return true + if (url.href === 'about:blank' || url.href === 'about:srcdoc') { + return true + } + + // If scheme is data, return true + if (url.protocol === 'data:') return true + + // If file, return true + if (url.protocol === 'file:') return true + + return isOriginPotentiallyTrustworthy(url.origin) + + function isOriginPotentiallyTrustworthy (origin) { + // If origin is explicitly null, return false + if (origin == null || origin === 'null') return false + + const originAsURL = new URL(origin) + + // If secure, return true + if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') { + return true + } + + // If localhost or variants, return true + if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) || + (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) || + (originAsURL.hostname.endsWith('.localhost'))) { + return true + } + + // If any other, return false + return false + } +} + +/** + * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist + * @param {Uint8Array} bytes + * @param {string} metadataList + */ +function bytesMatch (bytes, metadataList) { + // If node is not built with OpenSSL support, we cannot check + // a request's integrity, so allow it by default (the spec will + // allow requests if an invalid hash is given, as precedence). + /* istanbul ignore if: only if node is built with --without-ssl */ + if (crypto === undefined) { + return true + } + + // 1. Let parsedMetadata be the result of parsing metadataList. 
+  const parsedMetadata = parseMetadata(metadataList)
+
+  // 2. If parsedMetadata is no metadata, return true.
+  if (parsedMetadata === 'no metadata') {
+    return true
+  }
+
+  // 3. If response is not eligible for integrity validation, return false.
+  // TODO
+
+  // 4. If parsedMetadata is the empty set, return true.
+  if (parsedMetadata.length === 0) {
+    return true
+  }
+
+  // 5. Let metadata be the result of getting the strongest
+  //    metadata from parsedMetadata.
+  const strongest = getStrongestMetadata(parsedMetadata)
+  const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
+
+  // 6. For each item in metadata:
+  for (const item of metadata) {
+    // 1. Let algorithm be the alg component of item.
+    const algorithm = item.algo
+
+    // 2. Let expectedValue be the val component of item.
+    const expectedValue = item.hash
+
+    // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
+    // "be liberal with padding". This is annoying, and it's not even in the spec.
+
+    // 3. Let actualValue be the result of applying algorithm to bytes.
+    let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
+
+    if (actualValue[actualValue.length - 1] === '=') {
+      if (actualValue[actualValue.length - 2] === '=') {
+        actualValue = actualValue.slice(0, -2)
+      } else {
+        actualValue = actualValue.slice(0, -1)
+      }
+    }
+
+    // 4. If actualValue is a case-sensitive match for expectedValue,
+    //    return true.
+    if (compareBase64Mixed(actualValue, expectedValue)) {
+      return true
+    }
+  }
+
+  // 7. Return false.
+  return false
+}
+
+// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
+// https://www.w3.org/TR/CSP2/#source-list-syntax
+// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
+const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
+
+/**
+ * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
+ * @param {string} metadata
+ */
+function parseMetadata (metadata) {
+  // 1. Let result be the empty set.
+  /** @type {{ algo: string, hash: string }[]} */
+  const result = []
+
+  // 2. Let empty be equal to true.
+  let empty = true
+
+  // 3. For each token returned by splitting metadata on spaces:
+  for (const token of metadata.split(' ')) {
+    // 1. Set empty to false.
+    empty = false
+
+    // 2. Parse token as a hash-with-options.
+    const parsedToken = parseHashWithOptions.exec(token)
+
+    // 3. If token does not parse, continue to the next token.
+    if (
+      parsedToken === null ||
+      parsedToken.groups === undefined ||
+      parsedToken.groups.algo === undefined
+    ) {
+      // Note: Chromium blocks the request at this point, but Firefox
+      // gives a warning that an invalid integrity was given. The
+      // correct behavior is to ignore these, and subsequently not
+      // check the integrity of the resource.
+      continue
+    }
+
+    // 4. Let algorithm be the hash-algo component of token.
+    const algorithm = parsedToken.groups.algo.toLowerCase()
+
+    // 5. If algorithm is a hash function recognized by the user
+    //    agent, add the parsed token to result.
+    if (supportedHashes.includes(algorithm)) {
+      result.push(parsedToken.groups)
+    }
+  }
+
+  // 4. Return no metadata if empty is true, otherwise return result.
+ if (empty === true) { + return 'no metadata' + } + + return result +} + +/** + * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList + */ +function getStrongestMetadata (metadataList) { + // Let algorithm be the algo component of the first item in metadataList. + // Can be sha256 + let algorithm = metadataList[0].algo + // If the algorithm is sha512, then it is the strongest + // and we can return immediately + if (algorithm[3] === '5') { + return algorithm + } + + for (let i = 1; i < metadataList.length; ++i) { + const metadata = metadataList[i] + // If the algorithm is sha512, then it is the strongest + // and we can break the loop immediately + if (metadata.algo[3] === '5') { + algorithm = 'sha512' + break + // If the algorithm is sha384, then a potential sha256 or sha384 is ignored + } else if (algorithm[3] === '3') { + continue + // algorithm is sha256, check if algorithm is sha384 and if so, set it as + // the strongest + } else if (metadata.algo[3] === '3') { + algorithm = 'sha384' + } + } + return algorithm +} + +function filterMetadataListByAlgorithm (metadataList, algorithm) { + if (metadataList.length === 1) { + return metadataList + } + + let pos = 0 + for (let i = 0; i < metadataList.length; ++i) { + if (metadataList[i].algo === algorithm) { + metadataList[pos++] = metadataList[i] + } + } + + metadataList.length = pos + + return metadataList +} + +/** + * Compares two base64 strings, allowing for base64url + * in the second string. + * +* @param {string} actualValue always base64 + * @param {string} expectedValue base64 or base64url + * @returns {boolean} + */ +function compareBase64Mixed (actualValue, expectedValue) { + if (actualValue.length !== expectedValue.length) { + return false + } + for (let i = 0; i < actualValue.length; ++i) { + if (actualValue[i] !== expectedValue[i]) { + if ( + (actualValue[i] === '+' && expectedValue[i] === '-') || + (actualValue[i] === '/' && expectedValue[i] === '_') + ) { + continue + } + return false + } + } + + return true +} + +// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request +function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { + // TODO +} + +/** + * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin} + * @param {URL} A + * @param {URL} B + */ +function sameOrigin (A, B) { + // 1. If A and B are the same opaque origin, then return true. + if (A.origin === B.origin && A.origin === 'null') { + return true + } + + // 2. If A and B are both tuple origins and their schemes, + // hosts, and port are identical, then return true. + if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) { + return true + } + + // 3. Return false. + return false +} + +function createDeferredPromise () { + let res + let rej + const promise = new Promise((resolve, reject) => { + res = resolve + rej = reject + }) + + return { promise, resolve: res, reject: rej } +} + +function isAborted (fetchParams) { + return fetchParams.controller.state === 'aborted' +} + +function isCancelled (fetchParams) { + return fetchParams.controller.state === 'aborted' || + fetchParams.controller.state === 'terminated' +} + +const normalizeMethodRecord = { + delete: 'DELETE', + DELETE: 'DELETE', + get: 'GET', + GET: 'GET', + head: 'HEAD', + HEAD: 'HEAD', + options: 'OPTIONS', + OPTIONS: 'OPTIONS', + post: 'POST', + POST: 'POST', + put: 'PUT', + PUT: 'PUT' +} + +// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. 
+Object.setPrototypeOf(normalizeMethodRecord, null) + +/** + * @see https://fetch.spec.whatwg.org/#concept-method-normalize + * @param {string} method + */ +function normalizeMethod (method) { + return normalizeMethodRecord[method.toLowerCase()] ?? method +} + +// https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string +function serializeJavascriptValueToJSONString (value) { + // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »). + const result = JSON.stringify(value) + + // 2. If result is undefined, then throw a TypeError. + if (result === undefined) { + throw new TypeError('Value is not JSON serializable') + } + + // 3. Assert: result is a string. + assert(typeof result === 'string') + + // 4. Return result. + return result +} + +// https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object +const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) + +/** + * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object + * @param {() => unknown[]} iterator + * @param {string} name name of the instance + * @param {'key'|'value'|'key+value'} kind + */ +function makeIterator (iterator, name, kind) { + const object = { + index: 0, + kind, + target: iterator + } + + const i = { + next () { + // 1. Let interface be the interface for which the iterator prototype object exists. + + // 2. Let thisValue be the this value. + + // 3. Let object be ? ToObject(thisValue). + + // 4. If object is a platform object, then perform a security + // check, passing: + + // 5. If object is not a default iterator object for interface, + // then throw a TypeError. + if (Object.getPrototypeOf(this) !== i) { + throw new TypeError( + `'next' called on an object that does not implement interface ${name} Iterator.` + ) + } + + // 6. Let index be object’s index. + // 7. Let kind be object’s kind. + // 8. Let values be object’s target's value pairs to iterate over. + const { index, kind, target } = object + const values = target() + + // 9. Let len be the length of values. + const len = values.length + + // 10. If index is greater than or equal to len, then return + // CreateIterResultObject(undefined, true). + if (index >= len) { + return { value: undefined, done: true } + } + + // 11. Let pair be the entry in values at index index. + const pair = values[index] + + // 12. Set object’s index to index + 1. + object.index = index + 1 + + // 13. Return the iterator result for pair and kind. + return iteratorResult(pair, kind) + }, + // The class string of an iterator prototype object for a given interface is the + // result of concatenating the identifier of the interface and the string " Iterator". + [Symbol.toStringTag]: `${name} Iterator` + } + + // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%. + Object.setPrototypeOf(i, esIteratorPrototype) + // esIteratorPrototype needs to be the prototype of i + // which is the prototype of an empty object. Yes, it's confusing. + return Object.setPrototypeOf({}, i) +} + +// https://webidl.spec.whatwg.org/#iterator-result +function iteratorResult (pair, kind) { + let result + + // 1. Let result be a value determined by the value of kind: + switch (kind) { + case 'key': { + // 1. Let idlKey be pair’s key. + // 2. Let key be the result of converting idlKey to an + // ECMAScript value. + // 3. result is key. + result = pair[0] + break + } + case 'value': { + // 1. Let idlValue be pair’s value. + // 2. 
Let value be the result of converting idlValue to + // an ECMAScript value. + // 3. result is value. + result = pair[1] + break + } + case 'key+value': { + // 1. Let idlKey be pair’s key. + // 2. Let idlValue be pair’s value. + // 3. Let key be the result of converting idlKey to an + // ECMAScript value. + // 4. Let value be the result of converting idlValue to + // an ECMAScript value. + // 5. Let array be ! ArrayCreate(2). + // 6. Call ! CreateDataProperty(array, "0", key). + // 7. Call ! CreateDataProperty(array, "1", value). + // 8. result is array. + result = pair + break + } + } + + // 2. Return CreateIterResultObject(result, false). + return { value: result, done: false } +} + +/** + * @see https://fetch.spec.whatwg.org/#body-fully-read + */ +async function fullyReadBody (body, processBody, processBodyError) { + // 1. If taskDestination is null, then set taskDestination to + // the result of starting a new parallel queue. + + // 2. Let successSteps given a byte sequence bytes be to queue a + // fetch task to run processBody given bytes, with taskDestination. + const successSteps = processBody + + // 3. Let errorSteps be to queue a fetch task to run processBodyError, + // with taskDestination. + const errorSteps = processBodyError + + // 4. Let reader be the result of getting a reader for body’s stream. + // If that threw an exception, then run errorSteps with that + // exception and return. + let reader + + try { + reader = body.stream.getReader() + } catch (e) { + errorSteps(e) + return + } + + // 5. Read all bytes from reader, given successSteps and errorSteps. + try { + const result = await readAllBytes(reader) + successSteps(result) + } catch (e) { + errorSteps(e) + } +} + +/** @type {ReadableStream} */ +let ReadableStream = globalThis.ReadableStream + +function isReadableStreamLike (stream) { + if (!ReadableStream) { + ReadableStream = (__nccwpck_require__(3774).ReadableStream) + } + + return stream instanceof ReadableStream || ( + stream[Symbol.toStringTag] === 'ReadableStream' && + typeof stream.tee === 'function' + ) +} + +const MAXIMUM_ARGUMENT_LENGTH = 65535 + +/** + * @see https://infra.spec.whatwg.org/#isomorphic-decode + * @param {number[]|Uint8Array} input + */ +function isomorphicDecode (input) { + // 1. To isomorphic decode a byte sequence input, return a string whose code point + // length is equal to input’s length and whose code points have the same values + // as the values of input’s bytes, in the same order. + + if (input.length < MAXIMUM_ARGUMENT_LENGTH) { + return String.fromCharCode(...input) + } + + return input.reduce((previous, current) => previous + String.fromCharCode(current), '') +} + +/** + * @param {ReadableStreamController} controller + */ +function readableStreamClose (controller) { + try { + controller.close() + } catch (err) { + // TODO: add comment explaining why this error occurs. + if (!err.message.includes('Controller is already closed')) { + throw err + } + } +} + +/** + * @see https://infra.spec.whatwg.org/#isomorphic-encode + * @param {string} input + */ +function isomorphicEncode (input) { + // 1. Assert: input contains no code points greater than U+00FF. + for (let i = 0; i < input.length; i++) { + assert(input.charCodeAt(i) <= 0xFF) + } + + // 2. 
Return a byte sequence whose length is equal to input’s code + // point length and whose bytes have the same values as the + // values of input’s code points, in the same order + return input +} + +/** + * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes + * @see https://streams.spec.whatwg.org/#read-loop + * @param {ReadableStreamDefaultReader} reader + */ +async function readAllBytes (reader) { + const bytes = [] + let byteLength = 0 + + while (true) { + const { done, value: chunk } = await reader.read() + + if (done) { + // 1. Call successSteps with bytes. + return Buffer.concat(bytes, byteLength) + } + + // 1. If chunk is not a Uint8Array object, call failureSteps + // with a TypeError and abort these steps. + if (!isUint8Array(chunk)) { + throw new TypeError('Received non-Uint8Array chunk') + } + + // 2. Append the bytes represented by chunk to bytes. + bytes.push(chunk) + byteLength += chunk.length + + // 3. Read-loop given reader, bytes, successSteps, and failureSteps. + } +} + +/** + * @see https://fetch.spec.whatwg.org/#is-local + * @param {URL} url + */ +function urlIsLocal (url) { + assert('protocol' in url) // ensure it's a url object + + const protocol = url.protocol + + return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:' +} + +/** + * @param {string|URL} url + */ +function urlHasHttpsScheme (url) { + if (typeof url === 'string') { + return url.startsWith('https:') + } + + return url.protocol === 'https:' +} + +/** + * @see https://fetch.spec.whatwg.org/#http-scheme + * @param {URL} url + */ +function urlIsHttpHttpsScheme (url) { + assert('protocol' in url) // ensure it's a url object + + const protocol = url.protocol + + return protocol === 'http:' || protocol === 'https:' +} + +/** + * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0. + */ +const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key)) + +module.exports = { + isAborted, + isCancelled, + createDeferredPromise, + ReadableStreamFrom, + toUSVString, + tryUpgradeRequestToAPotentiallyTrustworthyURL, + coarsenedSharedCurrentTime, + determineRequestsReferrer, + makePolicyContainer, + clonePolicyContainer, + appendFetchMetadata, + appendRequestOriginHeader, + TAOCheck, + corsCheck, + crossOriginResourcePolicyCheck, + createOpaqueTimingInfo, + setRequestReferrerPolicyOnRedirect, + isValidHTTPToken, + requestBadPort, + requestCurrentURL, + responseURL, + responseLocationURL, + isBlobLike, + isURLPotentiallyTrustworthy, + isValidReasonPhrase, + sameOrigin, + normalizeMethod, + serializeJavascriptValueToJSONString, + makeIterator, + isValidHeaderName, + isValidHeaderValue, + hasOwn, + isErrorLike, + fullyReadBody, + bytesMatch, + isReadableStreamLike, + readableStreamClose, + isomorphicEncode, + isomorphicDecode, + urlIsLocal, + urlHasHttpsScheme, + urlIsHttpHttpsScheme, + readAllBytes, + normalizeMethodRecord, + parseMetadata +} + + +/***/ }), + +/***/ 4222: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { types } = __nccwpck_require__(9023) +const { hasOwn, toUSVString } = __nccwpck_require__(5523) + +/** @type {import('../../types/webidl').Webidl} */ +const webidl = {} +webidl.converters = {} +webidl.util = {} +webidl.errors = {} + +webidl.errors.exception = function (message) { + return new TypeError(`${message.header}: ${message.message}`) +} + +webidl.errors.conversionFailed = function (context) { + const plural = context.types.length === 1 ? 
'' : ' one of' + const message = + `${context.argument} could not be converted to` + + `${plural}: ${context.types.join(', ')}.` + + return webidl.errors.exception({ + header: context.prefix, + message + }) +} + +webidl.errors.invalidArgument = function (context) { + return webidl.errors.exception({ + header: context.prefix, + message: `"${context.value}" is an invalid ${context.type}.` + }) +} + +// https://webidl.spec.whatwg.org/#implements +webidl.brandCheck = function (V, I, opts = undefined) { + if (opts?.strict !== false && !(V instanceof I)) { + throw new TypeError('Illegal invocation') + } else { + return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag] + } +} + +webidl.argumentLengthCheck = function ({ length }, min, ctx) { + if (length < min) { + throw webidl.errors.exception({ + message: `${min} argument${min !== 1 ? 's' : ''} required, ` + + `but${length ? ' only' : ''} ${length} found.`, + ...ctx + }) + } +} + +webidl.illegalConstructor = function () { + throw webidl.errors.exception({ + header: 'TypeError', + message: 'Illegal constructor' + }) +} + +// https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values +webidl.util.Type = function (V) { + switch (typeof V) { + case 'undefined': return 'Undefined' + case 'boolean': return 'Boolean' + case 'string': return 'String' + case 'symbol': return 'Symbol' + case 'number': return 'Number' + case 'bigint': return 'BigInt' + case 'function': + case 'object': { + if (V === null) { + return 'Null' + } + + return 'Object' + } + } +} + +// https://webidl.spec.whatwg.org/#abstract-opdef-converttoint +webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) { + let upperBound + let lowerBound + + // 1. If bitLength is 64, then: + if (bitLength === 64) { + // 1. Let upperBound be 2^53 − 1. + upperBound = Math.pow(2, 53) - 1 + + // 2. If signedness is "unsigned", then let lowerBound be 0. + if (signedness === 'unsigned') { + lowerBound = 0 + } else { + // 3. Otherwise let lowerBound be −2^53 + 1. + lowerBound = Math.pow(-2, 53) + 1 + } + } else if (signedness === 'unsigned') { + // 2. Otherwise, if signedness is "unsigned", then: + + // 1. Let lowerBound be 0. + lowerBound = 0 + + // 2. Let upperBound be 2^bitLength − 1. + upperBound = Math.pow(2, bitLength) - 1 + } else { + // 3. Otherwise: + + // 1. Let lowerBound be -2^bitLength − 1. + lowerBound = Math.pow(-2, bitLength) - 1 + + // 2. Let upperBound be 2^bitLength − 1 − 1. + upperBound = Math.pow(2, bitLength - 1) - 1 + } + + // 4. Let x be ? ToNumber(V). + let x = Number(V) + + // 5. If x is −0, then set x to +0. + if (x === 0) { + x = 0 + } + + // 6. If the conversion is to an IDL type associated + // with the [EnforceRange] extended attribute, then: + if (opts.enforceRange === true) { + // 1. If x is NaN, +∞, or −∞, then throw a TypeError. + if ( + Number.isNaN(x) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Could not convert ${V} to an integer.` + }) + } + + // 2. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) + + // 3. If x < lowerBound or x > upperBound, then + // throw a TypeError. + if (x < lowerBound || x > upperBound) { + throw webidl.errors.exception({ + header: 'Integer conversion', + message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.` + }) + } + + // 4. Return x. + return x + } + + // 7. 
If x is not NaN and the conversion is to an IDL + // type associated with the [Clamp] extended + // attribute, then: + if (!Number.isNaN(x) && opts.clamp === true) { + // 1. Set x to min(max(x, lowerBound), upperBound). + x = Math.min(Math.max(x, lowerBound), upperBound) + + // 2. Round x to the nearest integer, choosing the + // even integer if it lies halfway between two, + // and choosing +0 rather than −0. + if (Math.floor(x) % 2 === 0) { + x = Math.floor(x) + } else { + x = Math.ceil(x) + } + + // 3. Return x. + return x + } + + // 8. If x is NaN, +0, +∞, or −∞, then return +0. + if ( + Number.isNaN(x) || + (x === 0 && Object.is(0, x)) || + x === Number.POSITIVE_INFINITY || + x === Number.NEGATIVE_INFINITY + ) { + return 0 + } + + // 9. Set x to IntegerPart(x). + x = webidl.util.IntegerPart(x) + + // 10. Set x to x modulo 2^bitLength. + x = x % Math.pow(2, bitLength) + + // 11. If signedness is "signed" and x ≥ 2^bitLength − 1, + // then return x − 2^bitLength. + if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) { + return x - Math.pow(2, bitLength) + } + + // 12. Otherwise, return x. + return x +} + +// https://webidl.spec.whatwg.org/#abstract-opdef-integerpart +webidl.util.IntegerPart = function (n) { + // 1. Let r be floor(abs(n)). + const r = Math.floor(Math.abs(n)) + + // 2. If n < 0, then return -1 × r. + if (n < 0) { + return -1 * r + } + + // 3. Otherwise, return r. + return r +} + +// https://webidl.spec.whatwg.org/#es-sequence +webidl.sequenceConverter = function (converter) { + return (V) => { + // 1. If Type(V) is not Object, throw a TypeError. + if (webidl.util.Type(V) !== 'Object') { + throw webidl.errors.exception({ + header: 'Sequence', + message: `Value of type ${webidl.util.Type(V)} is not an Object.` + }) + } + + // 2. Let method be ? GetMethod(V, @@iterator). + /** @type {Generator} */ + const method = V?.[Symbol.iterator]?.() + const seq = [] + + // 3. If method is undefined, throw a TypeError. + if ( + method === undefined || + typeof method.next !== 'function' + ) { + throw webidl.errors.exception({ + header: 'Sequence', + message: 'Object is not an iterator.' + }) + } + + // https://webidl.spec.whatwg.org/#create-sequence-from-iterable + while (true) { + const { done, value } = method.next() + + if (done) { + break + } + + seq.push(converter(value)) + } + + return seq + } +} + +// https://webidl.spec.whatwg.org/#es-to-record +webidl.recordConverter = function (keyConverter, valueConverter) { + return (O) => { + // 1. If Type(O) is not Object, throw a TypeError. + if (webidl.util.Type(O) !== 'Object') { + throw webidl.errors.exception({ + header: 'Record', + message: `Value of type ${webidl.util.Type(O)} is not an Object.` + }) + } + + // 2. Let result be a new empty instance of record. + const result = {} + + if (!types.isProxy(O)) { + // Object.keys only returns enumerable properties + const keys = Object.keys(O) + + for (const key of keys) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) + + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) + + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } + + // 5. Return result. + return result + } + + // 3. Let keys be ? O.[[OwnPropertyKeys]](). + const keys = Reflect.ownKeys(O) + + // 4. For each key of keys. + for (const key of keys) { + // 1. Let desc be ? O.[[GetOwnProperty]](key). 
+ const desc = Reflect.getOwnPropertyDescriptor(O, key) + + // 2. If desc is not undefined and desc.[[Enumerable]] is true: + if (desc?.enumerable) { + // 1. Let typedKey be key converted to an IDL value of type K. + const typedKey = keyConverter(key) + + // 2. Let value be ? Get(O, key). + // 3. Let typedValue be value converted to an IDL value of type V. + const typedValue = valueConverter(O[key]) + + // 4. Set result[typedKey] to typedValue. + result[typedKey] = typedValue + } + } + + // 5. Return result. + return result + } +} + +webidl.interfaceConverter = function (i) { + return (V, opts = {}) => { + if (opts.strict !== false && !(V instanceof i)) { + throw webidl.errors.exception({ + header: i.name, + message: `Expected ${V} to be an instance of ${i.name}.` + }) + } + + return V + } +} + +webidl.dictionaryConverter = function (converters) { + return (dictionary) => { + const type = webidl.util.Type(dictionary) + const dict = {} + + if (type === 'Null' || type === 'Undefined') { + return dict + } else if (type !== 'Object') { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Expected ${dictionary} to be one of: Null, Undefined, Object.` + }) + } + + for (const options of converters) { + const { key, defaultValue, required, converter } = options + + if (required === true) { + if (!hasOwn(dictionary, key)) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `Missing required key "${key}".` + }) + } + } + + let value = dictionary[key] + const hasDefault = hasOwn(options, 'defaultValue') + + // Only use defaultValue if value is undefined and + // a defaultValue options was provided. + if (hasDefault && value !== null) { + value = value ?? defaultValue + } + + // A key can be optional and have no default value. + // When this happens, do not perform a conversion, + // and do not assign the key a value. + if (required || hasDefault || value !== undefined) { + value = converter(value) + + if ( + options.allowedValues && + !options.allowedValues.includes(value) + ) { + throw webidl.errors.exception({ + header: 'Dictionary', + message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.` + }) + } + + dict[key] = value + } + } + + return dict + } +} + +webidl.nullableConverter = function (converter) { + return (V) => { + if (V === null) { + return V + } + + return converter(V) + } +} + +// https://webidl.spec.whatwg.org/#es-DOMString +webidl.converters.DOMString = function (V, opts = {}) { + // 1. If V is null and the conversion is to an IDL type + // associated with the [LegacyNullToEmptyString] + // extended attribute, then return the DOMString value + // that represents the empty string. + if (V === null && opts.legacyNullToEmptyString) { + return '' + } + + // 2. Let x be ? ToString(V). + if (typeof V === 'symbol') { + throw new TypeError('Could not convert argument of type symbol to string.') + } + + // 3. Return the IDL DOMString value that represents the + // same sequence of code units as the one the + // ECMAScript String value x represents. + return String(V) +} + +// https://webidl.spec.whatwg.org/#es-ByteString +webidl.converters.ByteString = function (V) { + // 1. Let x be ? ToString(V). + // Note: DOMString converter perform ? ToString(V) + const x = webidl.converters.DOMString(V) + + // 2. If the value of any element of x is greater than + // 255, then throw a TypeError. 
+ for (let index = 0; index < x.length; index++) { + if (x.charCodeAt(index) > 255) { + throw new TypeError( + 'Cannot convert argument to a ByteString because the character at ' + + `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` + ) + } + } + + // 3. Return an IDL ByteString value whose length is the + // length of x, and where the value of each element is + // the value of the corresponding element of x. + return x +} + +// https://webidl.spec.whatwg.org/#es-USVString +webidl.converters.USVString = toUSVString + +// https://webidl.spec.whatwg.org/#es-boolean +webidl.converters.boolean = function (V) { + // 1. Let x be the result of computing ToBoolean(V). + const x = Boolean(V) + + // 2. Return the IDL boolean value that is the one that represents + // the same truth value as the ECMAScript Boolean value x. + return x +} + +// https://webidl.spec.whatwg.org/#es-any +webidl.converters.any = function (V) { + return V +} + +// https://webidl.spec.whatwg.org/#es-long-long +webidl.converters['long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "signed"). + const x = webidl.util.ConvertToInt(V, 64, 'signed') + + // 2. Return the IDL long long value that represents + // the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-long-long +webidl.converters['unsigned long long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 64, "unsigned"). + const x = webidl.util.ConvertToInt(V, 64, 'unsigned') + + // 2. Return the IDL unsigned long long value that + // represents the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-long +webidl.converters['unsigned long'] = function (V) { + // 1. Let x be ? ConvertToInt(V, 32, "unsigned"). + const x = webidl.util.ConvertToInt(V, 32, 'unsigned') + + // 2. Return the IDL unsigned long value that + // represents the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#es-unsigned-short +webidl.converters['unsigned short'] = function (V, opts) { + // 1. Let x be ? ConvertToInt(V, 16, "unsigned"). + const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts) + + // 2. Return the IDL unsigned short value that represents + // the same numeric value as x. + return x +} + +// https://webidl.spec.whatwg.org/#idl-ArrayBuffer +webidl.converters.ArrayBuffer = function (V, opts = {}) { + // 1. If Type(V) is not Object, or V does not have an + // [[ArrayBufferData]] internal slot, then throw a + // TypeError. + // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances + // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances + if ( + webidl.util.Type(V) !== 'Object' || + !types.isAnyArrayBuffer(V) + ) { + throw webidl.errors.conversionFailed({ + prefix: `${V}`, + argument: `${V}`, + types: ['ArrayBuffer'] + }) + } + + // 2. If the conversion is not to an IDL type associated + // with the [AllowShared] extended attribute, and + // IsSharedArrayBuffer(V) is true, then throw a + // TypeError. + if (opts.allowShared === false && types.isSharedArrayBuffer(V)) { + throw webidl.errors.exception({ + header: 'ArrayBuffer', + message: 'SharedArrayBuffer is not allowed.' + }) + } + + // 3. If the conversion is not to an IDL type associated + // with the [AllowResizable] extended attribute, and + // IsResizableArrayBuffer(V) is true, then throw a + // TypeError. + // Note: resizable ArrayBuffers are currently a proposal. + + // 4. 
Return the IDL ArrayBuffer value that is a
+  //    reference to the same object as V.
+  return V
+}
+
+webidl.converters.TypedArray = function (V, T, opts = {}) {
+  // 1. Let T be the IDL type V is being converted to.
+
+  // 2. If Type(V) is not Object, or V does not have a
+  //    [[TypedArrayName]] internal slot with a value
+  //    equal to T’s name, then throw a TypeError.
+  if (
+    webidl.util.Type(V) !== 'Object' ||
+    !types.isTypedArray(V) ||
+    V.constructor.name !== T.name
+  ) {
+    throw webidl.errors.conversionFailed({
+      prefix: `${T.name}`,
+      argument: `${V}`,
+      types: [T.name]
+    })
+  }
+
+  // 3. If the conversion is not to an IDL type associated
+  //    with the [AllowShared] extended attribute, and
+  //    IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is
+  //    true, then throw a TypeError.
+  if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
+    throw webidl.errors.exception({
+      header: 'ArrayBuffer',
+      message: 'SharedArrayBuffer is not allowed.'
+    })
+  }
+
+  // 4. If the conversion is not to an IDL type associated
+  //    with the [AllowResizable] extended attribute, and
+  //    IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
+  //    true, then throw a TypeError.
+  // Note: resizable array buffers are currently a proposal
+
+  // 5. Return the IDL value of type T that is a reference
+  //    to the same object as V.
+  return V
+}
+
+webidl.converters.DataView = function (V, opts = {}) {
+  // 1. If Type(V) is not Object, or V does not have a
+  //    [[DataView]] internal slot, then throw a TypeError.
+  if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) {
+    throw webidl.errors.exception({
+      header: 'DataView',
+      message: 'Object is not a DataView.'
+    })
+  }
+
+  // 2. If the conversion is not to an IDL type associated
+  //    with the [AllowShared] extended attribute, and
+  //    IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true,
+  //    then throw a TypeError.
+  if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) {
+    throw webidl.errors.exception({
+      header: 'ArrayBuffer',
+      message: 'SharedArrayBuffer is not allowed.'
+    })
+  }
+
+  // 3. If the conversion is not to an IDL type associated
+  //    with the [AllowResizable] extended attribute, and
+  //    IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is
+  //    true, then throw a TypeError.
+  // Note: resizable ArrayBuffers are currently a proposal
+
+  // 4. Return the IDL DataView value that is a reference
+  //    to the same object as V.
+  return V
+}
+
+// https://webidl.spec.whatwg.org/#BufferSource
+webidl.converters.BufferSource = function (V, opts = {}) {
+  if (types.isAnyArrayBuffer(V)) {
+    return webidl.converters.ArrayBuffer(V, opts)
+  }
+
+  if (types.isTypedArray(V)) {
+    return webidl.converters.TypedArray(V, V.constructor)
+  }
+
+  if (types.isDataView(V)) {
+    return webidl.converters.DataView(V, opts)
+  }
+
+  throw new TypeError(`Could not convert ${V} to a BufferSource.`)
+}
+
+webidl.converters['sequence<ByteString>'] = webidl.sequenceConverter(
+  webidl.converters.ByteString
+)
+
+webidl.converters['sequence<sequence<ByteString>>'] = webidl.sequenceConverter(
+  webidl.converters['sequence<ByteString>']
+)
+
+webidl.converters['record<ByteString, ByteString>'] = webidl.recordConverter(
+  webidl.converters.ByteString,
+  webidl.converters.ByteString
+)
+
+module.exports = {
+  webidl
+}
+
+
+/***/ }),
+
+/***/ 396:
+/***/ ((module) => {
+
+"use strict";
+
+
+/**
+ * @see https://encoding.spec.whatwg.org/#concept-encoding-get
+ * @param {string|undefined} label
+ */
+function getEncoding (label) {
+  if (!label) {
+    return 'failure'
+  }
+
+  // 1.
Remove any leading and trailing ASCII whitespace from label. + // 2. If label is an ASCII case-insensitive match for any of the + // labels listed in the table below, then return the + // corresponding encoding; otherwise return failure. + switch (label.trim().toLowerCase()) { + case 'unicode-1-1-utf-8': + case 'unicode11utf8': + case 'unicode20utf8': + case 'utf-8': + case 'utf8': + case 'x-unicode20utf8': + return 'UTF-8' + case '866': + case 'cp866': + case 'csibm866': + case 'ibm866': + return 'IBM866' + case 'csisolatin2': + case 'iso-8859-2': + case 'iso-ir-101': + case 'iso8859-2': + case 'iso88592': + case 'iso_8859-2': + case 'iso_8859-2:1987': + case 'l2': + case 'latin2': + return 'ISO-8859-2' + case 'csisolatin3': + case 'iso-8859-3': + case 'iso-ir-109': + case 'iso8859-3': + case 'iso88593': + case 'iso_8859-3': + case 'iso_8859-3:1988': + case 'l3': + case 'latin3': + return 'ISO-8859-3' + case 'csisolatin4': + case 'iso-8859-4': + case 'iso-ir-110': + case 'iso8859-4': + case 'iso88594': + case 'iso_8859-4': + case 'iso_8859-4:1988': + case 'l4': + case 'latin4': + return 'ISO-8859-4' + case 'csisolatincyrillic': + case 'cyrillic': + case 'iso-8859-5': + case 'iso-ir-144': + case 'iso8859-5': + case 'iso88595': + case 'iso_8859-5': + case 'iso_8859-5:1988': + return 'ISO-8859-5' + case 'arabic': + case 'asmo-708': + case 'csiso88596e': + case 'csiso88596i': + case 'csisolatinarabic': + case 'ecma-114': + case 'iso-8859-6': + case 'iso-8859-6-e': + case 'iso-8859-6-i': + case 'iso-ir-127': + case 'iso8859-6': + case 'iso88596': + case 'iso_8859-6': + case 'iso_8859-6:1987': + return 'ISO-8859-6' + case 'csisolatingreek': + case 'ecma-118': + case 'elot_928': + case 'greek': + case 'greek8': + case 'iso-8859-7': + case 'iso-ir-126': + case 'iso8859-7': + case 'iso88597': + case 'iso_8859-7': + case 'iso_8859-7:1987': + case 'sun_eu_greek': + return 'ISO-8859-7' + case 'csiso88598e': + case 'csisolatinhebrew': + case 'hebrew': + case 'iso-8859-8': + case 'iso-8859-8-e': + case 'iso-ir-138': + case 'iso8859-8': + case 'iso88598': + case 'iso_8859-8': + case 'iso_8859-8:1988': + case 'visual': + return 'ISO-8859-8' + case 'csiso88598i': + case 'iso-8859-8-i': + case 'logical': + return 'ISO-8859-8-I' + case 'csisolatin6': + case 'iso-8859-10': + case 'iso-ir-157': + case 'iso8859-10': + case 'iso885910': + case 'l6': + case 'latin6': + return 'ISO-8859-10' + case 'iso-8859-13': + case 'iso8859-13': + case 'iso885913': + return 'ISO-8859-13' + case 'iso-8859-14': + case 'iso8859-14': + case 'iso885914': + return 'ISO-8859-14' + case 'csisolatin9': + case 'iso-8859-15': + case 'iso8859-15': + case 'iso885915': + case 'iso_8859-15': + case 'l9': + return 'ISO-8859-15' + case 'iso-8859-16': + return 'ISO-8859-16' + case 'cskoi8r': + case 'koi': + case 'koi8': + case 'koi8-r': + case 'koi8_r': + return 'KOI8-R' + case 'koi8-ru': + case 'koi8-u': + return 'KOI8-U' + case 'csmacintosh': + case 'mac': + case 'macintosh': + case 'x-mac-roman': + return 'macintosh' + case 'iso-8859-11': + case 'iso8859-11': + case 'iso885911': + case 'tis-620': + case 'windows-874': + return 'windows-874' + case 'cp1250': + case 'windows-1250': + case 'x-cp1250': + return 'windows-1250' + case 'cp1251': + case 'windows-1251': + case 'x-cp1251': + return 'windows-1251' + case 'ansi_x3.4-1968': + case 'ascii': + case 'cp1252': + case 'cp819': + case 'csisolatin1': + case 'ibm819': + case 'iso-8859-1': + case 'iso-ir-100': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'iso_8859-1:1987': 
+ case 'l1': + case 'latin1': + case 'us-ascii': + case 'windows-1252': + case 'x-cp1252': + return 'windows-1252' + case 'cp1253': + case 'windows-1253': + case 'x-cp1253': + return 'windows-1253' + case 'cp1254': + case 'csisolatin5': + case 'iso-8859-9': + case 'iso-ir-148': + case 'iso8859-9': + case 'iso88599': + case 'iso_8859-9': + case 'iso_8859-9:1989': + case 'l5': + case 'latin5': + case 'windows-1254': + case 'x-cp1254': + return 'windows-1254' + case 'cp1255': + case 'windows-1255': + case 'x-cp1255': + return 'windows-1255' + case 'cp1256': + case 'windows-1256': + case 'x-cp1256': + return 'windows-1256' + case 'cp1257': + case 'windows-1257': + case 'x-cp1257': + return 'windows-1257' + case 'cp1258': + case 'windows-1258': + case 'x-cp1258': + return 'windows-1258' + case 'x-mac-cyrillic': + case 'x-mac-ukrainian': + return 'x-mac-cyrillic' + case 'chinese': + case 'csgb2312': + case 'csiso58gb231280': + case 'gb2312': + case 'gb_2312': + case 'gb_2312-80': + case 'gbk': + case 'iso-ir-58': + case 'x-gbk': + return 'GBK' + case 'gb18030': + return 'gb18030' + case 'big5': + case 'big5-hkscs': + case 'cn-big5': + case 'csbig5': + case 'x-x-big5': + return 'Big5' + case 'cseucpkdfmtjapanese': + case 'euc-jp': + case 'x-euc-jp': + return 'EUC-JP' + case 'csiso2022jp': + case 'iso-2022-jp': + return 'ISO-2022-JP' + case 'csshiftjis': + case 'ms932': + case 'ms_kanji': + case 'shift-jis': + case 'shift_jis': + case 'sjis': + case 'windows-31j': + case 'x-sjis': + return 'Shift_JIS' + case 'cseuckr': + case 'csksc56011987': + case 'euc-kr': + case 'iso-ir-149': + case 'korean': + case 'ks_c_5601-1987': + case 'ks_c_5601-1989': + case 'ksc5601': + case 'ksc_5601': + case 'windows-949': + return 'EUC-KR' + case 'csiso2022kr': + case 'hz-gb-2312': + case 'iso-2022-cn': + case 'iso-2022-cn-ext': + case 'iso-2022-kr': + case 'replacement': + return 'replacement' + case 'unicodefffe': + case 'utf-16be': + return 'UTF-16BE' + case 'csunicode': + case 'iso-10646-ucs-2': + case 'ucs-2': + case 'unicode': + case 'unicodefeff': + case 'utf-16': + case 'utf-16le': + return 'UTF-16LE' + case 'x-user-defined': + return 'x-user-defined' + default: return 'failure' + } +} + +module.exports = { + getEncoding +} + + +/***/ }), + +/***/ 2160: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} = __nccwpck_require__(165) +const { + kState, + kError, + kResult, + kEvents, + kAborted +} = __nccwpck_require__(6812) +const { webidl } = __nccwpck_require__(4222) +const { kEnumerableProperty } = __nccwpck_require__(3440) + +class FileReader extends EventTarget { + constructor () { + super() + + this[kState] = 'empty' + this[kResult] = null + this[kError] = null + this[kEvents] = { + loadend: null, + error: null, + abort: null, + load: null, + progress: null, + loadstart: null + } + } + + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer + * @param {import('buffer').Blob} blob + */ + readAsArrayBuffer (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsArrayBuffer(blob) method, when invoked, + // must initiate a read operation for blob with ArrayBuffer. 
+ readOperation(this, blob, 'ArrayBuffer') + } + + /** + * @see https://w3c.github.io/FileAPI/#readAsBinaryString + * @param {import('buffer').Blob} blob + */ + readAsBinaryString (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsBinaryString(blob) method, when invoked, + // must initiate a read operation for blob with BinaryString. + readOperation(this, blob, 'BinaryString') + } + + /** + * @see https://w3c.github.io/FileAPI/#readAsDataText + * @param {import('buffer').Blob} blob + * @param {string?} encoding + */ + readAsText (blob, encoding = undefined) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + if (encoding !== undefined) { + encoding = webidl.converters.DOMString(encoding) + } + + // The readAsText(blob, encoding) method, when invoked, + // must initiate a read operation for blob with Text and encoding. + readOperation(this, blob, 'Text', encoding) + } + + /** + * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL + * @param {import('buffer').Blob} blob + */ + readAsDataURL (blob) { + webidl.brandCheck(this, FileReader) + + webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' }) + + blob = webidl.converters.Blob(blob, { strict: false }) + + // The readAsDataURL(blob) method, when invoked, must + // initiate a read operation for blob with DataURL. + readOperation(this, blob, 'DataURL') + } + + /** + * @see https://w3c.github.io/FileAPI/#dfn-abort + */ + abort () { + // 1. If this's state is "empty" or if this's state is + // "done" set this's result to null and terminate + // this algorithm. + if (this[kState] === 'empty' || this[kState] === 'done') { + this[kResult] = null + return + } + + // 2. If this's state is "loading" set this's state to + // "done" and set this's result to null. + if (this[kState] === 'loading') { + this[kState] = 'done' + this[kResult] = null + } + + // 3. If there are any tasks from this on the file reading + // task source in an affiliated task queue, then remove + // those tasks from that task queue. + this[kAborted] = true + + // 4. Terminate the algorithm for the read method being processed. + // TODO + + // 5. Fire a progress event called abort at this. + fireAProgressEvent('abort', this) + + // 6. If this's state is not "loading", fire a progress + // event called loadend at this. + if (this[kState] !== 'loading') { + fireAProgressEvent('loadend', this) + } + } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate + */ + get readyState () { + webidl.brandCheck(this, FileReader) + + switch (this[kState]) { + case 'empty': return this.EMPTY + case 'loading': return this.LOADING + case 'done': return this.DONE + } + } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-result + */ + get result () { + webidl.brandCheck(this, FileReader) + + // The result attribute’s getter, when invoked, must return + // this's result. + return this[kResult] + } + + /** + * @see https://w3c.github.io/FileAPI/#dom-filereader-error + */ + get error () { + webidl.brandCheck(this, FileReader) + + // The error attribute’s getter, when invoked, must return + // this's error. 
+ return this[kError] + } + + get onloadend () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].loadend + } + + set onloadend (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].loadend) { + this.removeEventListener('loadend', this[kEvents].loadend) + } + + if (typeof fn === 'function') { + this[kEvents].loadend = fn + this.addEventListener('loadend', fn) + } else { + this[kEvents].loadend = null + } + } + + get onerror () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].error + } + + set onerror (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].error) { + this.removeEventListener('error', this[kEvents].error) + } + + if (typeof fn === 'function') { + this[kEvents].error = fn + this.addEventListener('error', fn) + } else { + this[kEvents].error = null + } + } + + get onloadstart () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].loadstart + } + + set onloadstart (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].loadstart) { + this.removeEventListener('loadstart', this[kEvents].loadstart) + } + + if (typeof fn === 'function') { + this[kEvents].loadstart = fn + this.addEventListener('loadstart', fn) + } else { + this[kEvents].loadstart = null + } + } + + get onprogress () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].progress + } + + set onprogress (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].progress) { + this.removeEventListener('progress', this[kEvents].progress) + } + + if (typeof fn === 'function') { + this[kEvents].progress = fn + this.addEventListener('progress', fn) + } else { + this[kEvents].progress = null + } + } + + get onload () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].load + } + + set onload (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].load) { + this.removeEventListener('load', this[kEvents].load) + } + + if (typeof fn === 'function') { + this[kEvents].load = fn + this.addEventListener('load', fn) + } else { + this[kEvents].load = null + } + } + + get onabort () { + webidl.brandCheck(this, FileReader) + + return this[kEvents].abort + } + + set onabort (fn) { + webidl.brandCheck(this, FileReader) + + if (this[kEvents].abort) { + this.removeEventListener('abort', this[kEvents].abort) + } + + if (typeof fn === 'function') { + this[kEvents].abort = fn + this.addEventListener('abort', fn) + } else { + this[kEvents].abort = null + } + } +} + +// https://w3c.github.io/FileAPI/#dom-filereader-empty +FileReader.EMPTY = FileReader.prototype.EMPTY = 0 +// https://w3c.github.io/FileAPI/#dom-filereader-loading +FileReader.LOADING = FileReader.prototype.LOADING = 1 +// https://w3c.github.io/FileAPI/#dom-filereader-done +FileReader.DONE = FileReader.prototype.DONE = 2 + +Object.defineProperties(FileReader.prototype, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors, + readAsArrayBuffer: kEnumerableProperty, + readAsBinaryString: kEnumerableProperty, + readAsText: kEnumerableProperty, + readAsDataURL: kEnumerableProperty, + abort: kEnumerableProperty, + readyState: kEnumerableProperty, + result: kEnumerableProperty, + error: kEnumerableProperty, + onloadstart: kEnumerableProperty, + onprogress: kEnumerableProperty, + onload: kEnumerableProperty, + onabort: kEnumerableProperty, + onerror: kEnumerableProperty, + onloadend: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'FileReader', + writable: false, + enumerable: false, + configurable: true + } 
+}) + +Object.defineProperties(FileReader, { + EMPTY: staticPropertyDescriptors, + LOADING: staticPropertyDescriptors, + DONE: staticPropertyDescriptors +}) + +module.exports = { + FileReader +} + + +/***/ }), + +/***/ 5976: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { webidl } = __nccwpck_require__(4222) + +const kState = Symbol('ProgressEvent state') + +/** + * @see https://xhr.spec.whatwg.org/#progressevent + */ +class ProgressEvent extends Event { + constructor (type, eventInitDict = {}) { + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {}) + + super(type, eventInitDict) + + this[kState] = { + lengthComputable: eventInitDict.lengthComputable, + loaded: eventInitDict.loaded, + total: eventInitDict.total + } + } + + get lengthComputable () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].lengthComputable + } + + get loaded () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].loaded + } + + get total () { + webidl.brandCheck(this, ProgressEvent) + + return this[kState].total + } +} + +webidl.converters.ProgressEventInit = webidl.dictionaryConverter([ + { + key: 'lengthComputable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'loaded', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'total', + converter: webidl.converters['unsigned long long'], + defaultValue: 0 + }, + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false + } +]) + +module.exports = { + ProgressEvent +} + + +/***/ }), + +/***/ 6812: +/***/ ((module) => { + +"use strict"; + + +module.exports = { + kState: Symbol('FileReader state'), + kResult: Symbol('FileReader result'), + kError: Symbol('FileReader error'), + kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'), + kEvents: Symbol('FileReader events'), + kAborted: Symbol('FileReader aborted') +} + + +/***/ }), + +/***/ 165: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + kState, + kError, + kResult, + kAborted, + kLastProgressEventFired +} = __nccwpck_require__(6812) +const { ProgressEvent } = __nccwpck_require__(5976) +const { getEncoding } = __nccwpck_require__(396) +const { DOMException } = __nccwpck_require__(7326) +const { serializeAMimeType, parseMIMEType } = __nccwpck_require__(4322) +const { types } = __nccwpck_require__(9023) +const { StringDecoder } = __nccwpck_require__(3193) +const { btoa } = __nccwpck_require__(181) + +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} + +/** + * @see https://w3c.github.io/FileAPI/#readOperation + * @param {import('./filereader').FileReader} fr + * @param {import('buffer').Blob} blob + * @param {string} type + * @param {string?} encodingName + */ +function readOperation (fr, blob, type, encodingName) { + // 1. If fr’s state is "loading", throw an InvalidStateError + // DOMException. + if (fr[kState] === 'loading') { + throw new DOMException('Invalid state', 'InvalidStateError') + } + + // 2. Set fr’s state to "loading". + fr[kState] = 'loading' + + // 3. Set fr’s result to null. + fr[kResult] = null + + // 4. Set fr’s error to null. 
+ fr[kError] = null + + // 5. Let stream be the result of calling get stream on blob. + /** @type {import('stream/web').ReadableStream} */ + const stream = blob.stream() + + // 6. Let reader be the result of getting a reader from stream. + const reader = stream.getReader() + + // 7. Let bytes be an empty byte sequence. + /** @type {Uint8Array[]} */ + const bytes = [] + + // 8. Let chunkPromise be the result of reading a chunk from + // stream with reader. + let chunkPromise = reader.read() + + // 9. Let isFirstChunk be true. + let isFirstChunk = true + + // 10. In parallel, while true: + // Note: "In parallel" just means non-blocking + // Note 2: readOperation itself cannot be async as double + // reading the body would then reject the promise, instead + // of throwing an error. + ;(async () => { + while (!fr[kAborted]) { + // 1. Wait for chunkPromise to be fulfilled or rejected. + try { + const { done, value } = await chunkPromise + + // 2. If chunkPromise is fulfilled, and isFirstChunk is + // true, queue a task to fire a progress event called + // loadstart at fr. + if (isFirstChunk && !fr[kAborted]) { + queueMicrotask(() => { + fireAProgressEvent('loadstart', fr) + }) + } + + // 3. Set isFirstChunk to false. + isFirstChunk = false + + // 4. If chunkPromise is fulfilled with an object whose + // done property is false and whose value property is + // a Uint8Array object, run these steps: + if (!done && types.isUint8Array(value)) { + // 1. Let bs be the byte sequence represented by the + // Uint8Array object. + + // 2. Append bs to bytes. + bytes.push(value) + + // 3. If roughly 50ms have passed since these steps + // were last invoked, queue a task to fire a + // progress event called progress at fr. + if ( + ( + fr[kLastProgressEventFired] === undefined || + Date.now() - fr[kLastProgressEventFired] >= 50 + ) && + !fr[kAborted] + ) { + fr[kLastProgressEventFired] = Date.now() + queueMicrotask(() => { + fireAProgressEvent('progress', fr) + }) + } + + // 4. Set chunkPromise to the result of reading a + // chunk from stream with reader. + chunkPromise = reader.read() + } else if (done) { + // 5. Otherwise, if chunkPromise is fulfilled with an + // object whose done property is true, queue a task + // to run the following steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' + + // 2. Let result be the result of package data given + // bytes, type, blob’s type, and encodingName. + try { + const result = packageData(bytes, type, blob.type, encodingName) + + // 4. Else: + + if (fr[kAborted]) { + return + } + + // 1. Set fr’s result to result. + fr[kResult] = result + + // 2. Fire a progress event called load at the fr. + fireAProgressEvent('load', fr) + } catch (error) { + // 3. If package data threw an exception error: + + // 1. Set fr’s error to error. + fr[kError] = error + + // 2. Fire a progress event called error at fr. + fireAProgressEvent('error', fr) + } + + // 5. If fr’s state is not "loading", fire a progress + // event called loadend at the fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) + + break + } + } catch (error) { + if (fr[kAborted]) { + return + } + + // 6. Otherwise, if chunkPromise is rejected with an + // error error, queue a task to run the following + // steps and abort this algorithm: + queueMicrotask(() => { + // 1. Set fr’s state to "done". + fr[kState] = 'done' + + // 2. Set fr’s error to error. + fr[kError] = error + + // 3. Fire a progress event called error at fr. 
+ fireAProgressEvent('error', fr) + + // 4. If fr’s state is not "loading", fire a progress + // event called loadend at fr. + if (fr[kState] !== 'loading') { + fireAProgressEvent('loadend', fr) + } + }) + + break + } + } + })() +} + +/** + * @see https://w3c.github.io/FileAPI/#fire-a-progress-event + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e The name of the event + * @param {import('./filereader').FileReader} reader + */ +function fireAProgressEvent (e, reader) { + // The progress event e does not bubble. e.bubbles must be false + // The progress event e is NOT cancelable. e.cancelable must be false + const event = new ProgressEvent(e, { + bubbles: false, + cancelable: false + }) + + reader.dispatchEvent(event) +} + +/** + * @see https://w3c.github.io/FileAPI/#blob-package-data + * @param {Uint8Array[]} bytes + * @param {string} type + * @param {string?} mimeType + * @param {string?} encodingName + */ +function packageData (bytes, type, mimeType, encodingName) { + // 1. A Blob has an associated package data algorithm, given + // bytes, a type, a optional mimeType, and a optional + // encodingName, which switches on type and runs the + // associated steps: + + switch (type) { + case 'DataURL': { + // 1. Return bytes as a DataURL [RFC2397] subject to + // the considerations below: + // * Use mimeType as part of the Data URL if it is + // available in keeping with the Data URL + // specification [RFC2397]. + // * If mimeType is not available return a Data URL + // without a media-type. [RFC2397]. + + // https://datatracker.ietf.org/doc/html/rfc2397#section-3 + // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data + // mediatype := [ type "/" subtype ] *( ";" parameter ) + // data := *urlchar + // parameter := attribute "=" value + let dataURL = 'data:' + + const parsed = parseMIMEType(mimeType || 'application/octet-stream') + + if (parsed !== 'failure') { + dataURL += serializeAMimeType(parsed) + } + + dataURL += ';base64,' + + const decoder = new StringDecoder('latin1') + + for (const chunk of bytes) { + dataURL += btoa(decoder.write(chunk)) + } + + dataURL += btoa(decoder.end()) + + return dataURL + } + case 'Text': { + // 1. Let encoding be failure + let encoding = 'failure' + + // 2. If the encodingName is present, set encoding to the + // result of getting an encoding from encodingName. + if (encodingName) { + encoding = getEncoding(encodingName) + } + + // 3. If encoding is failure, and mimeType is present: + if (encoding === 'failure' && mimeType) { + // 1. Let type be the result of parse a MIME type + // given mimeType. + const type = parseMIMEType(mimeType) + + // 2. If type is not failure, set encoding to the result + // of getting an encoding from type’s parameters["charset"]. + if (type !== 'failure') { + encoding = getEncoding(type.parameters.get('charset')) + } + } + + // 4. If encoding is failure, then set encoding to UTF-8. + if (encoding === 'failure') { + encoding = 'UTF-8' + } + + // 5. Decode bytes using fallback encoding encoding, and + // return the result. + return decode(bytes, encoding) + } + case 'ArrayBuffer': { + // Return a new ArrayBuffer whose contents are bytes. + const sequence = combineByteSequences(bytes) + + return sequence.buffer + } + case 'BinaryString': { + // Return bytes as a binary string, in which every byte + // is represented by a code unit of equal value [0..255]. 
+ let binaryString = '' + + const decoder = new StringDecoder('latin1') + + for (const chunk of bytes) { + binaryString += decoder.write(chunk) + } + + binaryString += decoder.end() + + return binaryString + } + } +} + +/** + * @see https://encoding.spec.whatwg.org/#decode + * @param {Uint8Array[]} ioQueue + * @param {string} encoding + */ +function decode (ioQueue, encoding) { + const bytes = combineByteSequences(ioQueue) + + // 1. Let BOMEncoding be the result of BOM sniffing ioQueue. + const BOMEncoding = BOMSniffing(bytes) + + let slice = 0 + + // 2. If BOMEncoding is non-null: + if (BOMEncoding !== null) { + // 1. Set encoding to BOMEncoding. + encoding = BOMEncoding + + // 2. Read three bytes from ioQueue, if BOMEncoding is + // UTF-8; otherwise read two bytes. + // (Do nothing with those bytes.) + slice = BOMEncoding === 'UTF-8' ? 3 : 2 + } + + // 3. Process a queue with an instance of encoding’s + // decoder, ioQueue, output, and "replacement". + + // 4. Return output. + + const sliced = bytes.slice(slice) + return new TextDecoder(encoding).decode(sliced) +} + +/** + * @see https://encoding.spec.whatwg.org/#bom-sniff + * @param {Uint8Array} ioQueue + */ +function BOMSniffing (ioQueue) { + // 1. Let BOM be the result of peeking 3 bytes from ioQueue, + // converted to a byte sequence. + const [a, b, c] = ioQueue + + // 2. For each of the rows in the table below, starting with + // the first one and going down, if BOM starts with the + // bytes given in the first column, then return the + // encoding given in the cell in the second column of that + // row. Otherwise, return null. + if (a === 0xEF && b === 0xBB && c === 0xBF) { + return 'UTF-8' + } else if (a === 0xFE && b === 0xFF) { + return 'UTF-16BE' + } else if (a === 0xFF && b === 0xFE) { + return 'UTF-16LE' + } + + return null +} + +/** + * @param {Uint8Array[]} sequences + */ +function combineByteSequences (sequences) { + const size = sequences.reduce((a, b) => { + return a + b.byteLength + }, 0) + + let offset = 0 + + return sequences.reduce((a, b) => { + a.set(b, offset) + offset += b.byteLength + return a + }, new Uint8Array(size)) +} + +module.exports = { + staticPropertyDescriptors, + readOperation, + fireAProgressEvent +} + + +/***/ }), + +/***/ 2581: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +// We include a version number for the Dispatcher API. In case of breaking changes, +// this version number must be increased to avoid conflicts. 
+const globalDispatcher = Symbol.for('undici.globalDispatcher.1') +const { InvalidArgumentError } = __nccwpck_require__(8707) +const Agent = __nccwpck_require__(9965) + +if (getGlobalDispatcher() === undefined) { + setGlobalDispatcher(new Agent()) +} + +function setGlobalDispatcher (agent) { + if (!agent || typeof agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument agent must implement Agent') + } + Object.defineProperty(globalThis, globalDispatcher, { + value: agent, + writable: true, + enumerable: false, + configurable: false + }) +} + +function getGlobalDispatcher () { + return globalThis[globalDispatcher] +} + +module.exports = { + setGlobalDispatcher, + getGlobalDispatcher +} + + +/***/ }), + +/***/ 8840: +/***/ ((module) => { + +"use strict"; + + +module.exports = class DecoratorHandler { + constructor (handler) { + this.handler = handler + } + + onConnect (...args) { + return this.handler.onConnect(...args) + } + + onError (...args) { + return this.handler.onError(...args) + } + + onUpgrade (...args) { + return this.handler.onUpgrade(...args) + } + + onHeaders (...args) { + return this.handler.onHeaders(...args) + } + + onData (...args) { + return this.handler.onData(...args) + } + + onComplete (...args) { + return this.handler.onComplete(...args) + } + + onBodySent (...args) { + return this.handler.onBodySent(...args) + } +} + + +/***/ }), + +/***/ 8299: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const util = __nccwpck_require__(3440) +const { kBodyUsed } = __nccwpck_require__(6443) +const assert = __nccwpck_require__(2613) +const { InvalidArgumentError } = __nccwpck_require__(8707) +const EE = __nccwpck_require__(4434) + +const redirectableStatusCodes = [300, 301, 302, 303, 307, 308] + +const kBody = Symbol('body') + +class BodyAsyncIterable { + constructor (body) { + this[kBody] = body + this[kBodyUsed] = false + } + + async * [Symbol.asyncIterator] () { + assert(!this[kBodyUsed], 'disturbed') + this[kBodyUsed] = true + yield * this[kBody] + } +} + +class RedirectHandler { + constructor (dispatch, maxRedirections, opts, handler) { + if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { + throw new InvalidArgumentError('maxRedirections must be a positive number') + } + + util.validateHandler(handler, opts.method, opts.upgrade) + + this.dispatch = dispatch + this.location = null + this.abort = null + this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy + this.maxRedirections = maxRedirections + this.handler = handler + this.history = [] + + if (util.isStream(this.opts.body)) { + // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp + // so that it can be dispatched again? + // TODO (fix): Do we need 100-expect support to provide a way to do this properly? + if (util.bodyLength(this.opts.body) === 0) { + this.opts.body + .on('data', function () { + assert(false) + }) + } + + if (typeof this.opts.body.readableDidRead !== 'boolean') { + this.opts.body[kBodyUsed] = false + EE.prototype.on.call(this.opts.body, 'data', function () { + this[kBodyUsed] = true + }) + } + } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') { + // TODO (fix): We can't access ReadableStream internal state + // to determine whether or not it has been disturbed. This is just + // a workaround. 
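+// ---- Illustrative sketch added by the editor; not part of the bundled undici output ----
+// A WHATWG ReadableStream exposes no public "disturbed" flag, so the redirect handler
+// wraps such bodies in BodyAsyncIterable (defined above), which records via kBodyUsed
+// whether the body was already consumed and refuses a second iteration. A minimal
+// sketch of that single-use guard, assuming a Node 18+ ReadableStream (async-iterable):
+function exampleSingleUseBody (stream) {
+  let used = false
+  return {
+    async * [Symbol.asyncIterator] () {
+      if (used) throw new TypeError('body already consumed')
+      used = true
+      yield * stream // delegate to the stream's own async iterator
+    }
+  }
+}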
+ this.opts.body = new BodyAsyncIterable(this.opts.body) + } else if ( + this.opts.body && + typeof this.opts.body !== 'string' && + !ArrayBuffer.isView(this.opts.body) && + util.isIterable(this.opts.body) + ) { + // TODO: Should we allow re-using iterable if !this.opts.idempotent + // or through some other flag? + this.opts.body = new BodyAsyncIterable(this.opts.body) + } + } + + onConnect (abort) { + this.abort = abort + this.handler.onConnect(abort, { history: this.history }) + } + + onUpgrade (statusCode, headers, socket) { + this.handler.onUpgrade(statusCode, headers, socket) + } + + onError (error) { + this.handler.onError(error) + } + + onHeaders (statusCode, headers, resume, statusText) { + this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) + ? null + : parseLocation(statusCode, headers) + + if (this.opts.origin) { + this.history.push(new URL(this.opts.path, this.opts.origin)) + } + + if (!this.location) { + return this.handler.onHeaders(statusCode, headers, resume, statusText) + } + + const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))) + const path = search ? `${pathname}${search}` : pathname + + // Remove headers referring to the original URL. + // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers. + // https://tools.ietf.org/html/rfc7231#section-6.4 + this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin) + this.opts.path = path + this.opts.origin = origin + this.opts.maxRedirections = 0 + this.opts.query = null + + // https://tools.ietf.org/html/rfc7231#section-6.4.4 + // In case of HTTP 303, always replace method to be either HEAD or GET + if (statusCode === 303 && this.opts.method !== 'HEAD') { + this.opts.method = 'GET' + this.opts.body = null + } + } + + onData (chunk) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response bodies. + + Redirection is used to serve the requested resource from another URL, so it is assumes that + no body is generated (and thus can be ignored). Even though generating a body is not prohibited. + + For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually + (which means it's optional and not mandated) contain just an hyperlink to the value of + the Location response header, so the body can be ignored safely. + + For status 300, which is "Multiple Choices", the spec mentions both generating a Location + response header AND a response body with the other possible location to follow. + Since the spec explicitily chooses not to specify a format for such body and leave it to + servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it. + */ + } else { + return this.handler.onData(chunk) + } + } + + onComplete (trailers) { + if (this.location) { + /* + https://tools.ietf.org/html/rfc7231#section-6.4 + + TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections + and neither are useful if present. + + See comment on onData method above for more detailed informations. 
+ */ + + this.location = null + this.abort = null + + this.dispatch(this.opts, this) + } else { + this.handler.onComplete(trailers) + } + } + + onBodySent (chunk) { + if (this.handler.onBodySent) { + this.handler.onBodySent(chunk) + } + } +} + +function parseLocation (statusCode, headers) { + if (redirectableStatusCodes.indexOf(statusCode) === -1) { + return null + } + + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toString().toLowerCase() === 'location') { + return headers[i + 1] + } + } +} + +// https://tools.ietf.org/html/rfc7231#section-6.4.4 +function shouldRemoveHeader (header, removeContent, unknownOrigin) { + if (header.length === 4) { + return util.headerNameToString(header) === 'host' + } + if (removeContent && util.headerNameToString(header).startsWith('content-')) { + return true + } + if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { + const name = util.headerNameToString(header) + return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' + } + return false +} + +// https://tools.ietf.org/html/rfc7231#section-6.4 +function cleanRequestHeaders (headers, removeContent, unknownOrigin) { + const ret = [] + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) { + ret.push(headers[i], headers[i + 1]) + } + } + } else if (headers && typeof headers === 'object') { + for (const key of Object.keys(headers)) { + if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) { + ret.push(key, headers[key]) + } + } + } else { + assert(headers == null, 'headers must be an object or an array') + } + return ret +} + +module.exports = RedirectHandler + + +/***/ }), + +/***/ 3573: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const assert = __nccwpck_require__(2613) + +const { kRetryHandlerDefaultRetry } = __nccwpck_require__(6443) +const { RequestRetryError } = __nccwpck_require__(8707) +const { isDisturbed, parseHeaders, parseRangeHeader } = __nccwpck_require__(3440) + +function calculateRetryAfterHeader (retryAfter) { + const current = Date.now() + const diff = new Date(retryAfter).getTime() - current + + return diff +} + +class RetryHandler { + constructor (opts, handlers) { + const { retryOptions, ...dispatchOpts } = opts + const { + // Retry scoped + retry: retryFn, + maxRetries, + maxTimeout, + minTimeout, + timeoutFactor, + // Response scoped + methods, + errorCodes, + retryAfter, + statusCodes + } = retryOptions ?? {} + + this.dispatch = handlers.dispatch + this.handler = handlers.handler + this.opts = dispatchOpts + this.abort = null + this.aborted = false + this.retryOpts = { + retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry], + retryAfter: retryAfter ?? true, + maxTimeout: maxTimeout ?? 30 * 1000, // 30s, + timeout: minTimeout ?? 500, // .5s + timeoutFactor: timeoutFactor ?? 2, + maxRetries: maxRetries ?? 5, + // What errors we should retry + methods: methods ?? ['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], + // Indicates which errors to retry + statusCodes: statusCodes ?? [500, 502, 503, 504, 429], + // List of errors to retry + errorCodes: errorCodes ?? 
[ + 'ECONNRESET', + 'ECONNREFUSED', + 'ENOTFOUND', + 'ENETDOWN', + 'ENETUNREACH', + 'EHOSTDOWN', + 'EHOSTUNREACH', + 'EPIPE' + ] + } + + this.retryCount = 0 + this.start = 0 + this.end = null + this.etag = null + this.resume = null + + // Handle possible onConnect duplication + this.handler.onConnect(reason => { + this.aborted = true + if (this.abort) { + this.abort(reason) + } else { + this.reason = reason + } + }) + } + + onRequestSent () { + if (this.handler.onRequestSent) { + this.handler.onRequestSent() + } + } + + onUpgrade (statusCode, headers, socket) { + if (this.handler.onUpgrade) { + this.handler.onUpgrade(statusCode, headers, socket) + } + } + + onConnect (abort) { + if (this.aborted) { + abort(this.reason) + } else { + this.abort = abort + } + } + + onBodySent (chunk) { + if (this.handler.onBodySent) return this.handler.onBodySent(chunk) + } + + static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) { + const { statusCode, code, headers } = err + const { method, retryOptions } = opts + const { + maxRetries, + timeout, + maxTimeout, + timeoutFactor, + statusCodes, + errorCodes, + methods + } = retryOptions + let { counter, currentTimeout } = state + + currentTimeout = + currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout + + // Any code that is not a Undici's originated and allowed to retry + if ( + code && + code !== 'UND_ERR_REQ_RETRY' && + code !== 'UND_ERR_SOCKET' && + !errorCodes.includes(code) + ) { + cb(err) + return + } + + // If a set of method are provided and the current method is not in the list + if (Array.isArray(methods) && !methods.includes(method)) { + cb(err) + return + } + + // If a set of status code are provided and the current status code is not in the list + if ( + statusCode != null && + Array.isArray(statusCodes) && + !statusCodes.includes(statusCode) + ) { + cb(err) + return + } + + // If we reached the max number of retries + if (counter > maxRetries) { + cb(err) + return + } + + let retryAfterHeader = headers != null && headers['retry-after'] + if (retryAfterHeader) { + retryAfterHeader = Number(retryAfterHeader) + retryAfterHeader = isNaN(retryAfterHeader) + ? calculateRetryAfterHeader(retryAfterHeader) + : retryAfterHeader * 1e3 // Retry-After is in seconds + } + + const retryTimeout = + retryAfterHeader > 0 + ? 
Math.min(retryAfterHeader, maxTimeout) + : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout) + + state.currentTimeout = retryTimeout + + setTimeout(() => cb(null), retryTimeout) + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage) { + const headers = parseHeaders(rawHeaders) + + this.retryCount += 1 + + if (statusCode >= 300) { + this.abort( + new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + // Checkpoint for resume from where we left it + if (this.resume != null) { + this.resume = null + + if (statusCode !== 206) { + return true + } + + const contentRange = parseRangeHeader(headers['content-range']) + // If no content range + if (!contentRange) { + this.abort( + new RequestRetryError('Content-Range mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + // Let's start with a weak etag check + if (this.etag != null && this.etag !== headers.etag) { + this.abort( + new RequestRetryError('ETag mismatch', statusCode, { + headers, + count: this.retryCount + }) + ) + return false + } + + const { start, size, end = size } = contentRange + + assert(this.start === start, 'content-range mismatch') + assert(this.end == null || this.end === end, 'content-range mismatch') + + this.resume = resume + return true + } + + if (this.end == null) { + if (statusCode === 206) { + // First time we receive 206 + const range = parseRangeHeader(headers['content-range']) + + if (range == null) { + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } + + const { start, size, end = size } = range + + assert( + start != null && Number.isFinite(start) && this.start !== start, + 'content-range mismatch' + ) + assert(Number.isFinite(start)) + assert( + end != null && Number.isFinite(end) && this.end !== end, + 'invalid content-length' + ) + + this.start = start + this.end = end + } + + // We make our best to checkpoint the body for further range headers + if (this.end == null) { + const contentLength = headers['content-length'] + this.end = contentLength != null ? Number(contentLength) : null + } + + assert(Number.isFinite(this.start)) + assert( + this.end == null || Number.isFinite(this.end), + 'invalid content-length' + ) + + this.resume = resume + this.etag = headers.etag != null ? headers.etag : null + + return this.handler.onHeaders( + statusCode, + rawHeaders, + resume, + statusMessage + ) + } + + const err = new RequestRetryError('Request failed', statusCode, { + headers, + count: this.retryCount + }) + + this.abort(err) + + return false + } + + onData (chunk) { + this.start += chunk.length + + return this.handler.onData(chunk) + } + + onComplete (rawTrailers) { + this.retryCount = 0 + return this.handler.onComplete(rawTrailers) + } + + onError (err) { + if (this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } + + this.retryOpts.retry( + err, + { + state: { counter: this.retryCount++, currentTimeout: this.retryAfter }, + opts: { retryOptions: this.retryOpts, ...this.opts } + }, + onRetry.bind(this) + ) + + function onRetry (err) { + if (err != null || this.aborted || isDisturbed(this.opts.body)) { + return this.handler.onError(err) + } + + if (this.start !== 0) { + this.opts = { + ...this.opts, + headers: { + ...this.opts.headers, + range: `bytes=${this.start}-${this.end ?? 
''}` + } + } + } + + try { + this.dispatch(this.opts, this) + } catch (err) { + this.handler.onError(err) + } + } + } +} + +module.exports = RetryHandler + + +/***/ }), + +/***/ 4415: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const RedirectHandler = __nccwpck_require__(8299) + +function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) { + return (dispatch) => { + return function Intercept (opts, handler) { + const { maxRedirections = defaultMaxRedirections } = opts + + if (!maxRedirections) { + return dispatch(opts, handler) + } + + const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler) + opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting. + return dispatch(opts, redirectHandler) + } + } +} + +module.exports = createRedirectInterceptor + + +/***/ }), + +/***/ 2824: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0; +const utils_1 = __nccwpck_require__(172); +// C headers +var ERROR; +(function (ERROR) { + ERROR[ERROR["OK"] = 0] = "OK"; + ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL"; + ERROR[ERROR["STRICT"] = 2] = "STRICT"; + ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED"; + ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH"; + ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION"; + ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD"; + ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL"; + ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT"; + ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION"; + ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN"; + ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH"; + ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE"; + ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS"; + ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE"; + ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING"; + ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN"; + ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE"; + ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE"; + ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER"; + ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE"; + ERROR[ERROR["PAUSED"] = 21] = "PAUSED"; + ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE"; + ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE"; + ERROR[ERROR["USER"] = 24] = "USER"; +})(ERROR = exports.ERROR || (exports.ERROR = {})); +var TYPE; +(function (TYPE) { + TYPE[TYPE["BOTH"] = 0] = "BOTH"; + TYPE[TYPE["REQUEST"] = 1] = "REQUEST"; + TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE"; +})(TYPE = exports.TYPE || (exports.TYPE = {})); +var FLAGS; +(function (FLAGS) { + FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = 
"CONNECTION_KEEP_ALIVE"; + FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE"; + FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE"; + FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED"; + FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE"; + FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH"; + FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY"; + FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING"; + // 1 << 8 is unused + FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING"; +})(FLAGS = exports.FLAGS || (exports.FLAGS = {})); +var LENIENT_FLAGS; +(function (LENIENT_FLAGS) { + LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS"; + LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH"; + LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE"; +})(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {})); +var METHODS; +(function (METHODS) { + METHODS[METHODS["DELETE"] = 0] = "DELETE"; + METHODS[METHODS["GET"] = 1] = "GET"; + METHODS[METHODS["HEAD"] = 2] = "HEAD"; + METHODS[METHODS["POST"] = 3] = "POST"; + METHODS[METHODS["PUT"] = 4] = "PUT"; + /* pathological */ + METHODS[METHODS["CONNECT"] = 5] = "CONNECT"; + METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS"; + METHODS[METHODS["TRACE"] = 7] = "TRACE"; + /* WebDAV */ + METHODS[METHODS["COPY"] = 8] = "COPY"; + METHODS[METHODS["LOCK"] = 9] = "LOCK"; + METHODS[METHODS["MKCOL"] = 10] = "MKCOL"; + METHODS[METHODS["MOVE"] = 11] = "MOVE"; + METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND"; + METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH"; + METHODS[METHODS["SEARCH"] = 14] = "SEARCH"; + METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK"; + METHODS[METHODS["BIND"] = 16] = "BIND"; + METHODS[METHODS["REBIND"] = 17] = "REBIND"; + METHODS[METHODS["UNBIND"] = 18] = "UNBIND"; + METHODS[METHODS["ACL"] = 19] = "ACL"; + /* subversion */ + METHODS[METHODS["REPORT"] = 20] = "REPORT"; + METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY"; + METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT"; + METHODS[METHODS["MERGE"] = 23] = "MERGE"; + /* upnp */ + METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH"; + METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY"; + METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE"; + METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE"; + /* RFC-5789 */ + METHODS[METHODS["PATCH"] = 28] = "PATCH"; + METHODS[METHODS["PURGE"] = 29] = "PURGE"; + /* CalDAV */ + METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR"; + /* RFC-2068, section 19.6.1.2 */ + METHODS[METHODS["LINK"] = 31] = "LINK"; + METHODS[METHODS["UNLINK"] = 32] = "UNLINK"; + /* icecast */ + METHODS[METHODS["SOURCE"] = 33] = "SOURCE"; + /* RFC-7540, section 11.6 */ + METHODS[METHODS["PRI"] = 34] = "PRI"; + /* RFC-2326 RTSP */ + METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE"; + METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE"; + METHODS[METHODS["SETUP"] = 37] = "SETUP"; + METHODS[METHODS["PLAY"] = 38] = "PLAY"; + METHODS[METHODS["PAUSE"] = 39] = "PAUSE"; + METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN"; + METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER"; + METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER"; + METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT"; + METHODS[METHODS["RECORD"] = 44] = "RECORD"; + /* RAOP */ + METHODS[METHODS["FLUSH"] = 45] = "FLUSH"; +})(METHODS = exports.METHODS || (exports.METHODS = {})); +exports.METHODS_HTTP = [ + METHODS.DELETE, + METHODS.GET, + METHODS.HEAD, + METHODS.POST, + METHODS.PUT, + METHODS.CONNECT, + METHODS.OPTIONS, + METHODS.TRACE, + METHODS.COPY, + METHODS.LOCK, + METHODS.MKCOL, + METHODS.MOVE, + METHODS.PROPFIND, + 
METHODS.PROPPATCH, + METHODS.SEARCH, + METHODS.UNLOCK, + METHODS.BIND, + METHODS.REBIND, + METHODS.UNBIND, + METHODS.ACL, + METHODS.REPORT, + METHODS.MKACTIVITY, + METHODS.CHECKOUT, + METHODS.MERGE, + METHODS['M-SEARCH'], + METHODS.NOTIFY, + METHODS.SUBSCRIBE, + METHODS.UNSUBSCRIBE, + METHODS.PATCH, + METHODS.PURGE, + METHODS.MKCALENDAR, + METHODS.LINK, + METHODS.UNLINK, + METHODS.PRI, + // TODO(indutny): should we allow it with HTTP? + METHODS.SOURCE, +]; +exports.METHODS_ICE = [ + METHODS.SOURCE, +]; +exports.METHODS_RTSP = [ + METHODS.OPTIONS, + METHODS.DESCRIBE, + METHODS.ANNOUNCE, + METHODS.SETUP, + METHODS.PLAY, + METHODS.PAUSE, + METHODS.TEARDOWN, + METHODS.GET_PARAMETER, + METHODS.SET_PARAMETER, + METHODS.REDIRECT, + METHODS.RECORD, + METHODS.FLUSH, + // For AirPlay + METHODS.GET, + METHODS.POST, +]; +exports.METHOD_MAP = utils_1.enumToMap(METHODS); +exports.H_METHOD_MAP = {}; +Object.keys(exports.METHOD_MAP).forEach((key) => { + if (/^H/.test(key)) { + exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key]; + } +}); +var FINISH; +(function (FINISH) { + FINISH[FINISH["SAFE"] = 0] = "SAFE"; + FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB"; + FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE"; +})(FINISH = exports.FINISH || (exports.FINISH = {})); +exports.ALPHA = []; +for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) { + // Upper case + exports.ALPHA.push(String.fromCharCode(i)); + // Lower case + exports.ALPHA.push(String.fromCharCode(i + 0x20)); +} +exports.NUM_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, +}; +exports.HEX_MAP = { + 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, + 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, + A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF, + a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf, +}; +exports.NUM = [ + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', +]; +exports.ALPHANUM = exports.ALPHA.concat(exports.NUM); +exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')']; +exports.USERINFO_CHARS = exports.ALPHANUM + .concat(exports.MARK) + .concat(['%', ';', ':', '&', '=', '+', '$', ',']); +// TODO(indutny): use RFC +exports.STRICT_URL_CHAR = [ + '!', '"', '$', '%', '&', '\'', + '(', ')', '*', '+', ',', '-', '.', '/', + ':', ';', '<', '=', '>', + '@', '[', '\\', ']', '^', '_', + '`', + '{', '|', '}', '~', +].concat(exports.ALPHANUM); +exports.URL_CHAR = exports.STRICT_URL_CHAR + .concat(['\t', '\f']); +// All characters with 0x80 bit set to 1 +for (let i = 0x80; i <= 0xff; i++) { + exports.URL_CHAR.push(i); +} +exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']); +/* Tokens as defined by rfc 2616. Also lowercases them. + * token = 1* + * separators = "(" | ")" | "<" | ">" | "@" + * | "," | ";" | ":" | "\" | <"> + * | "/" | "[" | "]" | "?" 
| "=" + * | "{" | "}" | SP | HT + */ +exports.STRICT_TOKEN = [ + '!', '#', '$', '%', '&', '\'', + '*', '+', '-', '.', + '^', '_', '`', + '|', '~', +].concat(exports.ALPHANUM); +exports.TOKEN = exports.STRICT_TOKEN.concat([' ']); +/* + * Verify that a char is a valid visible (printable) US-ASCII + * character or %x80-FF + */ +exports.HEADER_CHARS = ['\t']; +for (let i = 32; i <= 255; i++) { + if (i !== 127) { + exports.HEADER_CHARS.push(i); + } +} +// ',' = \x44 +exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44); +exports.MAJOR = exports.NUM_MAP; +exports.MINOR = exports.MAJOR; +var HEADER_STATE; +(function (HEADER_STATE) { + HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL"; + HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION"; + HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING"; + HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE"; + HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE"; + HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE"; + HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE"; + HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED"; +})(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {})); +exports.SPECIAL_HEADERS = { + 'connection': HEADER_STATE.CONNECTION, + 'content-length': HEADER_STATE.CONTENT_LENGTH, + 'proxy-connection': HEADER_STATE.CONNECTION, + 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING, + 'upgrade': HEADER_STATE.UPGRADE, +}; +//# sourceMappingURL=constants.js.map + +/***/ }), + +/***/ 3870: +/***/ ((module) => { + +module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACAB
NgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCE
DCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEA
yoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyhAQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEAwWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDAoLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBgtBvQEhEAwFC0G+ASEQDAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DC
yABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyABIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohEAzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQQFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgAUEBaiEBDOUCCyABQQFqIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcg
F0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsDQAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCvgICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYNmgMgAiANayAAKAIAIgFqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAF
BAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohAQzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiEQDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASACRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0AQe0AIRAM+QILIAFBAWohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQA
NAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2AILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgB
CACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsAawBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBIRAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAqgELIARBAWohBEGOASEQDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAE
QQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAEGwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHyz4CAAGotAABHDZIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASE
QDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIABBADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNjQEgAEEHNgIcIAAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgAC
AQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzbAQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMX
AsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNASABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2AhQgAEH9koCAADYCECAAQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAh
EAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDEEAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAgDDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAMRAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2AgxBACEQDEMLIABB5AA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDY
CFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCDAwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBS
EIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAgBQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAACIAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIAMhACADQX9HDQUMAwtBACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAI
AYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNCAgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoAozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiBSgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxakEQaiIGKAIAIggNAAsgBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQ
yYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRqIAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAiBEEBIABBA3Z0IgBxDQBBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHV
zAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvcgBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ0
9OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJUkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADOD
wAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAAdFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQA
AAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8=' + + +/***/ }), + +/***/ 3434: +/***/ ((module) => { + +module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAC
QAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQA
hAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQ
tB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AAhEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQDD0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMHgtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBAtBvwEhEAwDC0HAASEQDAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgA
SEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIgEgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZBGyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0AIABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohAQtBIiEQDL8DCwJAIAEiECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBB
LEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCACIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAyPAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiI
BIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAUQebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQdQAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6AdUB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiEBC0EEIRAM3gILAkAgASIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAU
GDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEEBaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBIRAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw3AASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiB
CACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQCAELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwELAkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMqAILIARBAWohAUEiIRAMpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAM
sQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaAZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgEhEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRIhEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAy
UAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAACIEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACAQNgIUIAAgFDYCDEEAIRAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQ
ADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCAAIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAOgAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQDMMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCyAAKAIEIQQgAEEANgIEIAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAI
BdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIABBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBBzYCDAyoAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHG
ASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCFCAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQckAIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIABBHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYCHCAAIAE2AhQgAEHjl4CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEE
CNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCAADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQAhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAtBgNSEgABBeEGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEE
kNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIANBA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA04CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEIAZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQgMBwtBACEADAULIABBf0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQ
QAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYCACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMgBXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGIAQ2AgAgBCAFNgIYIAQgBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEo
AhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgAGogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiAGRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIANGG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEAIAQgAHI2AojQgIAAIAIhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmF
jayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBpbiB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3RhcnQgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRU
JfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAUkVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJTlVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTkFNRV9DT01QTEVURQBIUEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICA
gICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAA
AAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw==' + + +/***/ }), + +/***/ 172: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.enumToMap = void 0; +function enumToMap(obj) { + const res = {}; + Object.keys(obj).forEach((key) => { + const value = obj[key]; + if (typeof value === 'number') { + res[key] = value; + } + }); + return res; +} +exports.enumToMap = enumToMap; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 7501: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { kClients } = __nccwpck_require__(6443) +const Agent = __nccwpck_require__(9965) +const { + kAgent, + kMockAgentSet, + kMockAgentGet, + kDispatches, + kIsMockActive, + kNetConnect, + kGetNetConnect, + kOptions, + kFactory +} = __nccwpck_require__(1117) +const MockClient = __nccwpck_require__(7365) +const MockPool = __nccwpck_require__(4004) +const { matchValue, buildMockOptions } = __nccwpck_require__(3397) +const { InvalidArgumentError, UndiciError } = __nccwpck_require__(8707) +const Dispatcher = __nccwpck_require__(992) +const Pluralizer = __nccwpck_require__(1529) +const PendingInterceptorsFormatter = __nccwpck_require__(6142) + +class FakeWeakRef { + constructor (value) { + this.value = value + } + + deref () { + return this.value + } +} + +class MockAgent extends Dispatcher { + constructor (opts) { + super(opts) + + this[kNetConnect] = true + this[kIsMockActive] = true + + // Instantiate Agent and encapsulate + if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + const agent = opts && opts.agent ? opts.agent : new Agent(opts) + this[kAgent] = agent + + this[kClients] = agent[kClients] + this[kOptions] = buildMockOptions(opts) + } + + get (origin) { + let dispatcher = this[kMockAgentGet](origin) + + if (!dispatcher) { + dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + } + return dispatcher + } + + dispatch (opts, handler) { + // Call MockAgent.get to perform additional setup before dispatching as normal + this.get(opts.origin) + return this[kAgent].dispatch(opts, handler) + } + + async close () { + await this[kAgent].close() + this[kClients].clear() + } + + deactivate () { + this[kIsMockActive] = false + } + + activate () { + this[kIsMockActive] = true + } + + enableNetConnect (matcher) { + if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) { + if (Array.isArray(this[kNetConnect])) { + this[kNetConnect].push(matcher) + } else { + this[kNetConnect] = [matcher] + } + } else if (typeof matcher === 'undefined') { + this[kNetConnect] = true + } else { + throw new InvalidArgumentError('Unsupported matcher. 
Must be one of String|Function|RegExp.') + } + } + + disableNetConnect () { + this[kNetConnect] = false + } + + // This is required to bypass issues caused by using global symbols - see: + // https://github.com/nodejs/undici/issues/1447 + get isMockActive () { + return this[kIsMockActive] + } + + [kMockAgentSet] (origin, dispatcher) { + this[kClients].set(origin, new FakeWeakRef(dispatcher)) + } + + [kFactory] (origin) { + const mockOptions = Object.assign({ agent: this }, this[kOptions]) + return this[kOptions] && this[kOptions].connections === 1 + ? new MockClient(origin, mockOptions) + : new MockPool(origin, mockOptions) + } + + [kMockAgentGet] (origin) { + // First check if we can immediately find it + const ref = this[kClients].get(origin) + if (ref) { + return ref.deref() + } + + // If the origin is not a string create a dummy parent pool and return to user + if (typeof origin !== 'string') { + const dispatcher = this[kFactory]('http://localhost:9999') + this[kMockAgentSet](origin, dispatcher) + return dispatcher + } + + // If we match, create a pool and assign the same dispatches + for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) { + const nonExplicitDispatcher = nonExplicitRef.deref() + if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) { + const dispatcher = this[kFactory](origin) + this[kMockAgentSet](origin, dispatcher) + dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches] + return dispatcher + } + } + } + + [kGetNetConnect] () { + return this[kNetConnect] + } + + pendingInterceptors () { + const mockAgentClients = this[kClients] + + return Array.from(mockAgentClients.entries()) + .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin }))) + .filter(({ pending }) => pending) + } + + assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) { + const pending = this.pendingInterceptors() + + if (pending.length === 0) { + return + } + + const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length) + + throw new UndiciError(` +${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending: + +${pendingInterceptorsFormatter.format(pending)} +`.trim()) + } +} + +module.exports = MockAgent + + +/***/ }), + +/***/ 7365: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { promisify } = __nccwpck_require__(9023) +const Client = __nccwpck_require__(6197) +const { buildMockDispatch } = __nccwpck_require__(3397) +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = __nccwpck_require__(1117) +const { MockInterceptor } = __nccwpck_require__(1511) +const Symbols = __nccwpck_require__(6443) +const { InvalidArgumentError } = __nccwpck_require__(8707) + +/** + * MockClient provides an API that extends the Client to influence the mockDispatches. 
+ */ +class MockClient extends Client { + constructor (origin, opts) { + super(origin, opts) + + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) + + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] + } + + get [Symbols.kConnected] () { + return this[kConnected] + } + + /** + * Sets up the base interceptor for mocking replies from undici. + */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) + } + + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) + } +} + +module.exports = MockClient + + +/***/ }), + +/***/ 2429: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { UndiciError } = __nccwpck_require__(8707) + +class MockNotMatchedError extends UndiciError { + constructor (message) { + super(message) + Error.captureStackTrace(this, MockNotMatchedError) + this.name = 'MockNotMatchedError' + this.message = message || 'The request does not match any registered mock dispatches' + this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED' + } +} + +module.exports = { + MockNotMatchedError +} + + +/***/ }), + +/***/ 1511: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { getResponseData, buildKey, addMockDispatch } = __nccwpck_require__(3397) +const { + kDispatches, + kDispatchKey, + kDefaultHeaders, + kDefaultTrailers, + kContentLength, + kMockDispatch +} = __nccwpck_require__(1117) +const { InvalidArgumentError } = __nccwpck_require__(8707) +const { buildURL } = __nccwpck_require__(3440) + +/** + * Defines the scope API for an interceptor reply + */ +class MockScope { + constructor (mockDispatch) { + this[kMockDispatch] = mockDispatch + } + + /** + * Delay a reply by a set amount in ms. + */ + delay (waitInMs) { + if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) { + throw new InvalidArgumentError('waitInMs must be a valid integer > 0') + } + + this[kMockDispatch].delay = waitInMs + return this + } + + /** + * For a defined reply, never mark as consumed. + */ + persist () { + this[kMockDispatch].persist = true + return this + } + + /** + * Allow one to define a reply for a set amount of matching requests. 
+ */ + times (repeatTimes) { + if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) { + throw new InvalidArgumentError('repeatTimes must be a valid integer > 0') + } + + this[kMockDispatch].times = repeatTimes + return this + } +} + +/** + * Defines an interceptor for a Mock + */ +class MockInterceptor { + constructor (opts, mockDispatches) { + if (typeof opts !== 'object') { + throw new InvalidArgumentError('opts must be an object') + } + if (typeof opts.path === 'undefined') { + throw new InvalidArgumentError('opts.path must be defined') + } + if (typeof opts.method === 'undefined') { + opts.method = 'GET' + } + // See https://github.com/nodejs/undici/issues/1245 + // As per RFC 3986, clients are not supposed to send URI + // fragments to servers when they retrieve a document, + if (typeof opts.path === 'string') { + if (opts.query) { + opts.path = buildURL(opts.path, opts.query) + } else { + // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811 + const parsedURL = new URL(opts.path, 'data://') + opts.path = parsedURL.pathname + parsedURL.search + } + } + if (typeof opts.method === 'string') { + opts.method = opts.method.toUpperCase() + } + + this[kDispatchKey] = buildKey(opts) + this[kDispatches] = mockDispatches + this[kDefaultHeaders] = {} + this[kDefaultTrailers] = {} + this[kContentLength] = false + } + + createMockScopeDispatchData (statusCode, data, responseOptions = {}) { + const responseData = getResponseData(data) + const contentLength = this[kContentLength] ? { 'content-length': responseData.length } : {} + const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers } + const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers } + + return { statusCode, data, headers, trailers } + } + + validateReplyParameters (statusCode, data, responseOptions) { + if (typeof statusCode === 'undefined') { + throw new InvalidArgumentError('statusCode must be defined') + } + if (typeof data === 'undefined') { + throw new InvalidArgumentError('data must be defined') + } + if (typeof responseOptions !== 'object') { + throw new InvalidArgumentError('responseOptions must be an object') + } + } + + /** + * Mock an undici request with a defined reply. + */ + reply (replyData) { + // Values of reply aren't available right now as they + // can only be available when the reply callback is invoked. + if (typeof replyData === 'function') { + // We'll first wrap the provided callback in another function, + // this function will properly resolve the data from the callback + // when invoked. + const wrappedDefaultsCallback = (opts) => { + // Our reply options callback contains the parameter for statusCode, data and options. + const resolvedData = replyData(opts) + + // Check if it is in the right format + if (typeof resolvedData !== 'object') { + throw new InvalidArgumentError('reply options callback must return an object') + } + + const { statusCode, data = '', responseOptions = {} } = resolvedData + this.validateReplyParameters(statusCode, data, responseOptions) + // Since the values can be obtained immediately we return them + // from this higher order function that will be resolved later. + return { + ...this.createMockScopeDispatchData(statusCode, data, responseOptions) + } + } + + // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data. 
+ const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback) + return new MockScope(newMockDispatch) + } + + // We can have either one or three parameters, if we get here, + // we should have 1-3 parameters. So we spread the arguments of + // this function to obtain the parameters, since replyData will always + // just be the statusCode. + const [statusCode, data = '', responseOptions = {}] = [...arguments] + this.validateReplyParameters(statusCode, data, responseOptions) + + // Send in-already provided data like usual + const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions) + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData) + return new MockScope(newMockDispatch) + } + + /** + * Mock an undici request with a defined error. + */ + replyWithError (error) { + if (typeof error === 'undefined') { + throw new InvalidArgumentError('error must be defined') + } + + const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error }) + return new MockScope(newMockDispatch) + } + + /** + * Set default reply headers on the interceptor for subsequent replies + */ + defaultReplyHeaders (headers) { + if (typeof headers === 'undefined') { + throw new InvalidArgumentError('headers must be defined') + } + + this[kDefaultHeaders] = headers + return this + } + + /** + * Set default reply trailers on the interceptor for subsequent replies + */ + defaultReplyTrailers (trailers) { + if (typeof trailers === 'undefined') { + throw new InvalidArgumentError('trailers must be defined') + } + + this[kDefaultTrailers] = trailers + return this + } + + /** + * Set reply content length header for replies on the interceptor + */ + replyContentLength () { + this[kContentLength] = true + return this + } +} + +module.exports.MockInterceptor = MockInterceptor +module.exports.MockScope = MockScope + + +/***/ }), + +/***/ 4004: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { promisify } = __nccwpck_require__(9023) +const Pool = __nccwpck_require__(5076) +const { buildMockDispatch } = __nccwpck_require__(3397) +const { + kDispatches, + kMockAgent, + kClose, + kOriginalClose, + kOrigin, + kOriginalDispatch, + kConnected +} = __nccwpck_require__(1117) +const { MockInterceptor } = __nccwpck_require__(1511) +const Symbols = __nccwpck_require__(6443) +const { InvalidArgumentError } = __nccwpck_require__(8707) + +/** + * MockPool provides an API that extends the Pool to influence the mockDispatches. + */ +class MockPool extends Pool { + constructor (origin, opts) { + super(origin, opts) + + if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { + throw new InvalidArgumentError('Argument opts.agent must implement Agent') + } + + this[kMockAgent] = opts.agent + this[kOrigin] = origin + this[kDispatches] = [] + this[kConnected] = 1 + this[kOriginalDispatch] = this.dispatch + this[kOriginalClose] = this.close.bind(this) + + this.dispatch = buildMockDispatch.call(this) + this.close = this[kClose] + } + + get [Symbols.kConnected] () { + return this[kConnected] + } + + /** + * Sets up the base interceptor for mocking replies from undici. 
+ */ + intercept (opts) { + return new MockInterceptor(opts, this[kDispatches]) + } + + async [kClose] () { + await promisify(this[kOriginalClose])() + this[kConnected] = 0 + this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) + } +} + +module.exports = MockPool + + +/***/ }), + +/***/ 1117: +/***/ ((module) => { + +"use strict"; + + +module.exports = { + kAgent: Symbol('agent'), + kOptions: Symbol('options'), + kFactory: Symbol('factory'), + kDispatches: Symbol('dispatches'), + kDispatchKey: Symbol('dispatch key'), + kDefaultHeaders: Symbol('default headers'), + kDefaultTrailers: Symbol('default trailers'), + kContentLength: Symbol('content length'), + kMockAgent: Symbol('mock agent'), + kMockAgentSet: Symbol('mock agent set'), + kMockAgentGet: Symbol('mock agent get'), + kMockDispatch: Symbol('mock dispatch'), + kClose: Symbol('close'), + kOriginalClose: Symbol('original agent close'), + kOrigin: Symbol('origin'), + kIsMockActive: Symbol('is mock active'), + kNetConnect: Symbol('net connect'), + kGetNetConnect: Symbol('get net connect'), + kConnected: Symbol('connected') +} + + +/***/ }), + +/***/ 3397: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { MockNotMatchedError } = __nccwpck_require__(2429) +const { + kDispatches, + kMockAgent, + kOriginalDispatch, + kOrigin, + kGetNetConnect +} = __nccwpck_require__(1117) +const { buildURL, nop } = __nccwpck_require__(3440) +const { STATUS_CODES } = __nccwpck_require__(8611) +const { + types: { + isPromise + } +} = __nccwpck_require__(9023) + +function matchValue (match, value) { + if (typeof match === 'string') { + return match === value + } + if (match instanceof RegExp) { + return match.test(value) + } + if (typeof match === 'function') { + return match(value) === true + } + return false +} + +function lowerCaseEntries (headers) { + return Object.fromEntries( + Object.entries(headers).map(([headerName, headerValue]) => { + return [headerName.toLocaleLowerCase(), headerValue] + }) + ) +} + +/** + * @param {import('../../index').Headers|string[]|Record} headers + * @param {string} key + */ +function getHeaderByName (headers, key) { + if (Array.isArray(headers)) { + for (let i = 0; i < headers.length; i += 2) { + if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) { + return headers[i + 1] + } + } + + return undefined + } else if (typeof headers.get === 'function') { + return headers.get(key) + } else { + return lowerCaseEntries(headers)[key.toLocaleLowerCase()] + } +} + +/** @param {string[]} headers */ +function buildHeadersFromArray (headers) { // fetch HeadersList + const clone = headers.slice() + const entries = [] + for (let index = 0; index < clone.length; index += 2) { + entries.push([clone[index], clone[index + 1]]) + } + return Object.fromEntries(entries) +} + +function matchHeaders (mockDispatch, headers) { + if (typeof mockDispatch.headers === 'function') { + if (Array.isArray(headers)) { // fetch HeadersList + headers = buildHeadersFromArray(headers) + } + return mockDispatch.headers(headers ? 
lowerCaseEntries(headers) : {}) + } + if (typeof mockDispatch.headers === 'undefined') { + return true + } + if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') { + return false + } + + for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) { + const headerValue = getHeaderByName(headers, matchHeaderName) + + if (!matchValue(matchHeaderValue, headerValue)) { + return false + } + } + return true +} + +function safeUrl (path) { + if (typeof path !== 'string') { + return path + } + + const pathSegments = path.split('?') + + if (pathSegments.length !== 2) { + return path + } + + const qp = new URLSearchParams(pathSegments.pop()) + qp.sort() + return [...pathSegments, qp.toString()].join('?') +} + +function matchKey (mockDispatch, { path, method, body, headers }) { + const pathMatch = matchValue(mockDispatch.path, path) + const methodMatch = matchValue(mockDispatch.method, method) + const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true + const headersMatch = matchHeaders(mockDispatch, headers) + return pathMatch && methodMatch && bodyMatch && headersMatch +} + +function getResponseData (data) { + if (Buffer.isBuffer(data)) { + return data + } else if (typeof data === 'object') { + return JSON.stringify(data) + } else { + return data.toString() + } +} + +function getMockDispatch (mockDispatches, key) { + const basePath = key.query ? buildURL(key.path, key.query) : key.path + const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath + + // Match path + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`) + } + + // Match method + matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`) + } + + // Match body + matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`) + } + + // Match headers + matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers)) + if (matchedMockDispatches.length === 0) { + throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`) + } + + return matchedMockDispatches[0] +} + +function addMockDispatch (mockDispatches, key, data) { + const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false } + const replyData = typeof data === 'function' ? 
{ callback: data } : { ...data } + const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } } + mockDispatches.push(newMockDispatch) + return newMockDispatch +} + +function deleteMockDispatch (mockDispatches, key) { + const index = mockDispatches.findIndex(dispatch => { + if (!dispatch.consumed) { + return false + } + return matchKey(dispatch, key) + }) + if (index !== -1) { + mockDispatches.splice(index, 1) + } +} + +function buildKey (opts) { + const { path, method, body, headers, query } = opts + return { + path, + method, + body, + headers, + query + } +} + +function generateKeyValues (data) { + return Object.entries(data).reduce((keyValuePairs, [key, value]) => [ + ...keyValuePairs, + Buffer.from(`${key}`), + Array.isArray(value) ? value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`) + ], []) +} + +/** + * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status + * @param {number} statusCode + */ +function getStatusText (statusCode) { + return STATUS_CODES[statusCode] || 'unknown' +} + +async function getResponse (body) { + const buffers = [] + for await (const data of body) { + buffers.push(data) + } + return Buffer.concat(buffers).toString('utf8') +} + +/** + * Mock dispatch function used to simulate undici dispatches + */ +function mockDispatch (opts, handler) { + // Get mock dispatch from built key + const key = buildKey(opts) + const mockDispatch = getMockDispatch(this[kDispatches], key) + + mockDispatch.timesInvoked++ + + // Here's where we resolve a callback if a callback is present for the dispatch data. + if (mockDispatch.data.callback) { + mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) } + } + + // Parse mockDispatch data + const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch + const { timesInvoked, times } = mockDispatch + + // If it's used up and not persistent, mark as consumed + mockDispatch.consumed = !persist && timesInvoked >= times + mockDispatch.pending = timesInvoked < times + + // If specified, trigger dispatch error + if (error !== null) { + deleteMockDispatch(this[kDispatches], key) + handler.onError(error) + return true + } + + // Handle the request with a delay if necessary + if (typeof delay === 'number' && delay > 0) { + setTimeout(() => { + handleReply(this[kDispatches]) + }, delay) + } else { + handleReply(this[kDispatches]) + } + + function handleReply (mockDispatches, _data = data) { + // fetch's HeadersList is a 1D string array + const optsHeaders = Array.isArray(opts.headers) + ? buildHeadersFromArray(opts.headers) + : opts.headers + const body = typeof _data === 'function' + ? _data({ ...opts, headers: optsHeaders }) + : _data + + // util.types.isPromise is likely needed for jest. + if (isPromise(body)) { + // If handleReply is asynchronous, throwing an error + // in the callback will reject the promise, rather than + // synchronously throw the error, which breaks some tests. + // Rather, we wait for the callback to resolve if it is a + // promise, and then re-run handleReply with the new body. 
+ body.then((newData) => handleReply(mockDispatches, newData)) + return + } + + const responseData = getResponseData(body) + const responseHeaders = generateKeyValues(headers) + const responseTrailers = generateKeyValues(trailers) + + handler.abort = nop + handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode)) + handler.onData(Buffer.from(responseData)) + handler.onComplete(responseTrailers) + deleteMockDispatch(mockDispatches, key) + } + + function resume () {} + + return true +} + +function buildMockDispatch () { + const agent = this[kMockAgent] + const origin = this[kOrigin] + const originalDispatch = this[kOriginalDispatch] + + return function dispatch (opts, handler) { + if (agent.isMockActive) { + try { + mockDispatch.call(this, opts, handler) + } catch (error) { + if (error instanceof MockNotMatchedError) { + const netConnect = agent[kGetNetConnect]() + if (netConnect === false) { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`) + } + if (checkNetConnect(netConnect, origin)) { + originalDispatch.call(this, opts, handler) + } else { + throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`) + } + } else { + throw error + } + } + } else { + originalDispatch.call(this, opts, handler) + } + } +} + +function checkNetConnect (netConnect, origin) { + const url = new URL(origin) + if (netConnect === true) { + return true + } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) { + return true + } + return false +} + +function buildMockOptions (opts) { + if (opts) { + const { agent, ...mockOptions } = opts + return mockOptions + } +} + +module.exports = { + getResponseData, + getMockDispatch, + addMockDispatch, + deleteMockDispatch, + buildKey, + generateKeyValues, + matchValue, + getResponse, + getStatusText, + mockDispatch, + buildMockDispatch, + checkNetConnect, + buildMockOptions, + getHeaderByName +} + + +/***/ }), + +/***/ 6142: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { Transform } = __nccwpck_require__(2203) +const { Console } = __nccwpck_require__(4236) + +/** + * Gets the output of `console.table(…)` as a string. + */ +module.exports = class PendingInterceptorsFormatter { + constructor ({ disableColors } = {}) { + this.transform = new Transform({ + transform (chunk, _enc, cb) { + cb(null, chunk) + } + }) + + this.logger = new Console({ + stdout: this.transform, + inspectOptions: { + colors: !disableColors && !process.env.CI + } + }) + } + + format (pendingInterceptors) { + const withPrettyHeaders = pendingInterceptors.map( + ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + Method: method, + Origin: origin, + Path: path, + 'Status code': statusCode, + Persistent: persist ? '✅' : '❌', + Invocations: timesInvoked, + Remaining: persist ? 
Infinity : times - timesInvoked + })) + + this.logger.table(withPrettyHeaders) + return this.transform.read().toString() + } +} + + +/***/ }), + +/***/ 1529: +/***/ ((module) => { + +"use strict"; + + +const singulars = { + pronoun: 'it', + is: 'is', + was: 'was', + this: 'this' +} + +const plurals = { + pronoun: 'they', + is: 'are', + was: 'were', + this: 'these' +} + +module.exports = class Pluralizer { + constructor (singular, plural) { + this.singular = singular + this.plural = plural + } + + pluralize (count) { + const one = count === 1 + const keys = one ? singulars : plurals + const noun = one ? this.singular : this.plural + return { ...keys, count, noun } + } +} + + +/***/ }), + +/***/ 4869: +/***/ ((module) => { + +"use strict"; +/* eslint-disable */ + + + +// Extracted from node/lib/internal/fixed_queue.js + +// Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two. +const kSize = 2048; +const kMask = kSize - 1; + +// The FixedQueue is implemented as a singly-linked list of fixed-size +// circular buffers. It looks something like this: +// +// head tail +// | | +// v v +// +-----------+ <-----\ +-----------+ <------\ +-----------+ +// | [null] | \----- | next | \------- | next | +// +-----------+ +-----------+ +-----------+ +// | item | <-- bottom | item | <-- bottom | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | | [empty] | +// | item | | item | bottom --> | item | +// | item | | item | | item | +// | ... | | ... | | ... | +// | item | | item | | item | +// | item | | item | | item | +// | [empty] | <-- top | item | | item | +// | [empty] | | item | | item | +// | [empty] | | [empty] | <-- top top --> | [empty] | +// +-----------+ +-----------+ +-----------+ +// +// Or, if there is only one circular buffer, it looks something +// like either of these: +// +// head tail head tail +// | | | | +// v v v v +// +-----------+ +-----------+ +// | [null] | | [null] | +// +-----------+ +-----------+ +// | [empty] | | item | +// | [empty] | | item | +// | item | <-- bottom top --> | [empty] | +// | item | | [empty] | +// | [empty] | <-- top bottom --> | item | +// | [empty] | | item | +// +-----------+ +-----------+ +// +// Adding a value means moving `top` forward by one, removing means +// moving `bottom` forward by one. After reaching the end, the queue +// wraps around. +// +// When `top === bottom` the current queue is empty and when +// `top + 1 === bottom` it's full. This wastes a single space of storage +// but allows much quicker checks. + +class FixedCircularBuffer { + constructor() { + this.bottom = 0; + this.top = 0; + this.list = new Array(kSize); + this.next = null; + } + + isEmpty() { + return this.top === this.bottom; + } + + isFull() { + return ((this.top + 1) & kMask) === this.bottom; + } + + push(data) { + this.list[this.top] = data; + this.top = (this.top + 1) & kMask; + } + + shift() { + const nextItem = this.list[this.bottom]; + if (nextItem === undefined) + return null; + this.list[this.bottom] = undefined; + this.bottom = (this.bottom + 1) & kMask; + return nextItem; + } +} + +module.exports = class FixedQueue { + constructor() { + this.head = this.tail = new FixedCircularBuffer(); + } + + isEmpty() { + return this.head.isEmpty(); + } + + push(data) { + if (this.head.isFull()) { + // Head is full: Creates a new queue, sets the old queue's `.next` to it, + // and sets it as the new main queue. 
+ this.head = this.head.next = new FixedCircularBuffer(); + } + this.head.push(data); + } + + shift() { + const tail = this.tail; + const next = tail.shift(); + if (tail.isEmpty() && tail.next !== null) { + // If there is another queue, it forms the new tail. + this.tail = tail.next; + } + return next; + } +}; + + +/***/ }), + +/***/ 8640: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const DispatcherBase = __nccwpck_require__(1) +const FixedQueue = __nccwpck_require__(4869) +const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = __nccwpck_require__(6443) +const PoolStats = __nccwpck_require__(4622) + +const kClients = Symbol('clients') +const kNeedDrain = Symbol('needDrain') +const kQueue = Symbol('queue') +const kClosedResolve = Symbol('closed resolve') +const kOnDrain = Symbol('onDrain') +const kOnConnect = Symbol('onConnect') +const kOnDisconnect = Symbol('onDisconnect') +const kOnConnectionError = Symbol('onConnectionError') +const kGetDispatcher = Symbol('get dispatcher') +const kAddClient = Symbol('add client') +const kRemoveClient = Symbol('remove client') +const kStats = Symbol('stats') + +class PoolBase extends DispatcherBase { + constructor () { + super() + + this[kQueue] = new FixedQueue() + this[kClients] = [] + this[kQueued] = 0 + + const pool = this + + this[kOnDrain] = function onDrain (origin, targets) { + const queue = pool[kQueue] + + let needDrain = false + + while (!needDrain) { + const item = queue.shift() + if (!item) { + break + } + pool[kQueued]-- + needDrain = !this.dispatch(item.opts, item.handler) + } + + this[kNeedDrain] = needDrain + + if (!this[kNeedDrain] && pool[kNeedDrain]) { + pool[kNeedDrain] = false + pool.emit('drain', origin, [pool, ...targets]) + } + + if (pool[kClosedResolve] && queue.isEmpty()) { + Promise + .all(pool[kClients].map(c => c.close())) + .then(pool[kClosedResolve]) + } + } + + this[kOnConnect] = (origin, targets) => { + pool.emit('connect', origin, [pool, ...targets]) + } + + this[kOnDisconnect] = (origin, targets, err) => { + pool.emit('disconnect', origin, [pool, ...targets], err) + } + + this[kOnConnectionError] = (origin, targets, err) => { + pool.emit('connectionError', origin, [pool, ...targets], err) + } + + this[kStats] = new PoolStats(this) + } + + get [kBusy] () { + return this[kNeedDrain] + } + + get [kConnected] () { + return this[kClients].filter(client => client[kConnected]).length + } + + get [kFree] () { + return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length + } + + get [kPending] () { + let ret = this[kQueued] + for (const { [kPending]: pending } of this[kClients]) { + ret += pending + } + return ret + } + + get [kRunning] () { + let ret = 0 + for (const { [kRunning]: running } of this[kClients]) { + ret += running + } + return ret + } + + get [kSize] () { + let ret = this[kQueued] + for (const { [kSize]: size } of this[kClients]) { + ret += size + } + return ret + } + + get stats () { + return this[kStats] + } + + async [kClose] () { + if (this[kQueue].isEmpty()) { + return Promise.all(this[kClients].map(c => c.close())) + } else { + return new Promise((resolve) => { + this[kClosedResolve] = resolve + }) + } + } + + async [kDestroy] (err) { + while (true) { + const item = this[kQueue].shift() + if (!item) { + break + } + item.handler.onError(err) + } + + return Promise.all(this[kClients].map(c => c.destroy(err))) + } + + [kDispatch] (opts, handler) { + const dispatcher = 
this[kGetDispatcher]() + + if (!dispatcher) { + this[kNeedDrain] = true + this[kQueue].push({ opts, handler }) + this[kQueued]++ + } else if (!dispatcher.dispatch(opts, handler)) { + dispatcher[kNeedDrain] = true + this[kNeedDrain] = !this[kGetDispatcher]() + } + + return !this[kNeedDrain] + } + + [kAddClient] (client) { + client + .on('drain', this[kOnDrain]) + .on('connect', this[kOnConnect]) + .on('disconnect', this[kOnDisconnect]) + .on('connectionError', this[kOnConnectionError]) + + this[kClients].push(client) + + if (this[kNeedDrain]) { + process.nextTick(() => { + if (this[kNeedDrain]) { + this[kOnDrain](client[kUrl], [this, client]) + } + }) + } + + return this + } + + [kRemoveClient] (client) { + client.close(() => { + const idx = this[kClients].indexOf(client) + if (idx !== -1) { + this[kClients].splice(idx, 1) + } + }) + + this[kNeedDrain] = this[kClients].some(dispatcher => ( + !dispatcher[kNeedDrain] && + dispatcher.closed !== true && + dispatcher.destroyed !== true + )) + } +} + +module.exports = { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kRemoveClient, + kGetDispatcher +} + + +/***/ }), + +/***/ 4622: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = __nccwpck_require__(6443) +const kPool = Symbol('pool') + +class PoolStats { + constructor (pool) { + this[kPool] = pool + } + + get connected () { + return this[kPool][kConnected] + } + + get free () { + return this[kPool][kFree] + } + + get pending () { + return this[kPool][kPending] + } + + get queued () { + return this[kPool][kQueued] + } + + get running () { + return this[kPool][kRunning] + } + + get size () { + return this[kPool][kSize] + } +} + +module.exports = PoolStats + + +/***/ }), + +/***/ 5076: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { + PoolBase, + kClients, + kNeedDrain, + kAddClient, + kGetDispatcher +} = __nccwpck_require__(8640) +const Client = __nccwpck_require__(6197) +const { + InvalidArgumentError +} = __nccwpck_require__(8707) +const util = __nccwpck_require__(3440) +const { kUrl, kInterceptors } = __nccwpck_require__(6443) +const buildConnector = __nccwpck_require__(9136) + +const kOptions = Symbol('options') +const kConnections = Symbol('connections') +const kFactory = Symbol('factory') + +function defaultFactory (origin, opts) { + return new Client(origin, opts) +} + +class Pool extends PoolBase { + constructor (origin, { + connections, + factory = defaultFactory, + connect, + connectTimeout, + tls, + maxCachedSessions, + socketPath, + autoSelectFamily, + autoSelectFamilyAttemptTimeout, + allowH2, + ...options + } = {}) { + super() + + if (connections != null && (!Number.isFinite(connections) || connections < 0)) { + throw new InvalidArgumentError('invalid connections') + } + + if (typeof factory !== 'function') { + throw new InvalidArgumentError('factory must be a function.') + } + + if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { + throw new InvalidArgumentError('connect must be a function or an object') + } + + if (typeof connect !== 'function') { + connect = buildConnector({ + ...tls, + maxCachedSessions, + allowH2, + socketPath, + timeout: connectTimeout, + ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? 
{ autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), + ...connect + }) + } + + this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool) + ? options.interceptors.Pool + : [] + this[kConnections] = connections || null + this[kUrl] = util.parseOrigin(origin) + this[kOptions] = { ...util.deepClone(options), connect, allowH2 } + this[kOptions].interceptors = options.interceptors + ? { ...options.interceptors } + : undefined + this[kFactory] = factory + } + + [kGetDispatcher] () { + let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain]) + + if (dispatcher) { + return dispatcher + } + + if (!this[kConnections] || this[kClients].length < this[kConnections]) { + dispatcher = this[kFactory](this[kUrl], this[kOptions]) + this[kAddClient](dispatcher) + } + + return dispatcher + } +} + +module.exports = Pool + + +/***/ }), + +/***/ 2720: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { kProxy, kClose, kDestroy, kInterceptors } = __nccwpck_require__(6443) +const { URL } = __nccwpck_require__(7016) +const Agent = __nccwpck_require__(9965) +const Pool = __nccwpck_require__(5076) +const DispatcherBase = __nccwpck_require__(1) +const { InvalidArgumentError, RequestAbortedError } = __nccwpck_require__(8707) +const buildConnector = __nccwpck_require__(9136) + +const kAgent = Symbol('proxy agent') +const kClient = Symbol('proxy client') +const kProxyHeaders = Symbol('proxy headers') +const kRequestTls = Symbol('request tls settings') +const kProxyTls = Symbol('proxy tls settings') +const kConnectEndpoint = Symbol('connect endpoint function') + +function defaultProtocolPort (protocol) { + return protocol === 'https:' ? 443 : 80 +} + +function buildProxyOptions (opts) { + if (typeof opts === 'string') { + opts = { uri: opts } + } + + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') + } + + return { + uri: opts.uri, + protocol: opts.protocol || 'https' + } +} + +function defaultFactory (origin, opts) { + return new Pool(origin, opts) +} + +class ProxyAgent extends DispatcherBase { + constructor (opts) { + super(opts) + this[kProxy] = buildProxyOptions(opts) + this[kAgent] = new Agent(opts) + this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent) + ? 
opts.interceptors.ProxyAgent + : [] + + if (typeof opts === 'string') { + opts = { uri: opts } + } + + if (!opts || !opts.uri) { + throw new InvalidArgumentError('Proxy opts.uri is mandatory') + } + + const { clientFactory = defaultFactory } = opts + + if (typeof clientFactory !== 'function') { + throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.') + } + + this[kRequestTls] = opts.requestTls + this[kProxyTls] = opts.proxyTls + this[kProxyHeaders] = opts.headers || {} + + const resolvedUrl = new URL(opts.uri) + const { origin, port, host, username, password } = resolvedUrl + + if (opts.auth && opts.token) { + throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') + } else if (opts.auth) { + /* @deprecated in favour of opts.token */ + this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` + } else if (opts.token) { + this[kProxyHeaders]['proxy-authorization'] = opts.token + } else if (username && password) { + this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}` + } + + const connect = buildConnector({ ...opts.proxyTls }) + this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) + this[kClient] = clientFactory(resolvedUrl, { connect }) + this[kAgent] = new Agent({ + ...opts, + connect: async (opts, callback) => { + let requestedHost = opts.host + if (!opts.port) { + requestedHost += `:${defaultProtocolPort(opts.protocol)}` + } + try { + const { socket, statusCode } = await this[kClient].connect({ + origin, + port, + path: requestedHost, + signal: opts.signal, + headers: { + ...this[kProxyHeaders], + host + } + }) + if (statusCode !== 200) { + socket.on('error', () => {}).destroy() + callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)) + } + if (opts.protocol !== 'https:') { + callback(null, socket) + return + } + let servername + if (this[kRequestTls]) { + servername = this[kRequestTls].servername + } else { + servername = opts.servername + } + this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback) + } catch (err) { + callback(err) + } + } + }) + } + + dispatch (opts, handler) { + const { host } = new URL(opts.origin) + const headers = buildHeaders(opts.headers) + throwIfProxyAuthIsSent(headers) + return this[kAgent].dispatch( + { + ...opts, + headers: { + ...headers, + host + } + }, + handler + ) + } + + async [kClose] () { + await this[kAgent].close() + await this[kClient].close() + } + + async [kDestroy] () { + await this[kAgent].destroy() + await this[kClient].destroy() + } +} + +/** + * @param {string[] | Record} headers + * @returns {Record} + */ +function buildHeaders (headers) { + // When using undici.fetch, the headers list is stored + // as an array. + if (Array.isArray(headers)) { + /** @type {Record} */ + const headersPair = {} + + for (let i = 0; i < headers.length; i += 2) { + headersPair[headers[i]] = headers[i + 1] + } + + return headersPair + } + + return headers +} + +/** + * @param {Record} headers + * + * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers + * Nevertheless, it was changed and to avoid a security vulnerability by end users + * this check was created. 
+ * It should be removed in the next major version for performance reasons + */ +function throwIfProxyAuthIsSent (headers) { + const existProxyAuth = headers && Object.keys(headers) + .find((key) => key.toLowerCase() === 'proxy-authorization') + if (existProxyAuth) { + throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor') + } +} + +module.exports = ProxyAgent + + +/***/ }), + +/***/ 8804: +/***/ ((module) => { + +"use strict"; + + +let fastNow = Date.now() +let fastNowTimeout + +const fastTimers = [] + +function onTimeout () { + fastNow = Date.now() + + let len = fastTimers.length + let idx = 0 + while (idx < len) { + const timer = fastTimers[idx] + + if (timer.state === 0) { + timer.state = fastNow + timer.delay + } else if (timer.state > 0 && fastNow >= timer.state) { + timer.state = -1 + timer.callback(timer.opaque) + } + + if (timer.state === -1) { + timer.state = -2 + if (idx !== len - 1) { + fastTimers[idx] = fastTimers.pop() + } else { + fastTimers.pop() + } + len -= 1 + } else { + idx += 1 + } + } + + if (fastTimers.length > 0) { + refreshTimeout() + } +} + +function refreshTimeout () { + if (fastNowTimeout && fastNowTimeout.refresh) { + fastNowTimeout.refresh() + } else { + clearTimeout(fastNowTimeout) + fastNowTimeout = setTimeout(onTimeout, 1e3) + if (fastNowTimeout.unref) { + fastNowTimeout.unref() + } + } +} + +class Timeout { + constructor (callback, delay, opaque) { + this.callback = callback + this.delay = delay + this.opaque = opaque + + // -2 not in timer list + // -1 in timer list but inactive + // 0 in timer list waiting for time + // > 0 in timer list waiting for time to expire + this.state = -2 + + this.refresh() + } + + refresh () { + if (this.state === -2) { + fastTimers.push(this) + if (!fastNowTimeout || fastTimers.length === 1) { + refreshTimeout() + } + } + + this.state = 0 + } + + clear () { + this.state = -1 + } +} + +module.exports = { + setTimeout (callback, delay, opaque) { + return delay < 1e3 + ? 
setTimeout(callback, delay, opaque) + : new Timeout(callback, delay, opaque) + }, + clearTimeout (timeout) { + if (timeout instanceof Timeout) { + timeout.clear() + } else { + clearTimeout(timeout) + } + } +} + + +/***/ }), + +/***/ 8550: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const diagnosticsChannel = __nccwpck_require__(1637) +const { uid, states } = __nccwpck_require__(5913) +const { + kReadyState, + kSentClose, + kByteParser, + kReceivedClose +} = __nccwpck_require__(2933) +const { fireEvent, failWebsocketConnection } = __nccwpck_require__(3574) +const { CloseEvent } = __nccwpck_require__(6255) +const { makeRequest } = __nccwpck_require__(5194) +const { fetching } = __nccwpck_require__(2315) +const { Headers } = __nccwpck_require__(6349) +const { getGlobalDispatcher } = __nccwpck_require__(2581) +const { kHeadersList } = __nccwpck_require__(6443) + +const channels = {} +channels.open = diagnosticsChannel.channel('undici:websocket:open') +channels.close = diagnosticsChannel.channel('undici:websocket:close') +channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error') + +/** @type {import('crypto')} */ +let crypto +try { + crypto = __nccwpck_require__(6982) +} catch { + +} + +/** + * @see https://websockets.spec.whatwg.org/#concept-websocket-establish + * @param {URL} url + * @param {string|string[]} protocols + * @param {import('./websocket').WebSocket} ws + * @param {(response: any) => void} onEstablish + * @param {Partial} options + */ +function establishWebSocketConnection (url, protocols, ws, onEstablish, options) { + // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s + // scheme is "ws", and to "https" otherwise. + const requestURL = url + + requestURL.protocol = url.protocol === 'ws:' ? 'http:' : 'https:' + + // 2. Let request be a new request, whose URL is requestURL, client is client, + // service-workers mode is "none", referrer is "no-referrer", mode is + // "websocket", credentials mode is "include", cache mode is "no-store" , + // and redirect mode is "error". + const request = makeRequest({ + urlList: [requestURL], + serviceWorkers: 'none', + referrer: 'no-referrer', + mode: 'websocket', + credentials: 'include', + cache: 'no-store', + redirect: 'error' + }) + + // Note: undici extension, allow setting custom headers. + if (options.headers) { + const headersList = new Headers(options.headers)[kHeadersList] + + request.headersList = headersList + } + + // 3. Append (`Upgrade`, `websocket`) to request’s header list. + // 4. Append (`Connection`, `Upgrade`) to request’s header list. + // Note: both of these are handled by undici currently. + // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397 + + // 5. Let keyValue be a nonce consisting of a randomly selected + // 16-byte value that has been forgiving-base64-encoded and + // isomorphic encoded. + const keyValue = crypto.randomBytes(16).toString('base64') + + // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s + // header list. + request.headersList.append('sec-websocket-key', keyValue) + + // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s + // header list. + request.headersList.append('sec-websocket-version', '13') + + // 8. For each protocol in protocols, combine + // (`Sec-WebSocket-Protocol`, protocol) in request’s header + // list. + for (const protocol of protocols) { + request.headersList.append('sec-websocket-protocol', protocol) + } + + // 9. 
Let permessageDeflate be a user-agent defined + // "permessage-deflate" extension header value. + // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673 + // TODO: enable once permessage-deflate is supported + const permessageDeflate = '' // 'permessage-deflate; 15' + + // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to + // request’s header list. + // request.headersList.append('sec-websocket-extensions', permessageDeflate) + + // 11. Fetch request with useParallelQueue set to true, and + // processResponse given response being these steps: + const controller = fetching({ + request, + useParallelQueue: true, + dispatcher: options.dispatcher ?? getGlobalDispatcher(), + processResponse (response) { + // 1. If response is a network error or its status is not 101, + // fail the WebSocket connection. + if (response.type === 'error' || response.status !== 101) { + failWebsocketConnection(ws, 'Received network error or non-101 status code.') + return + } + + // 2. If protocols is not the empty list and extracting header + // list values given `Sec-WebSocket-Protocol` and response’s + // header list results in null, failure, or the empty byte + // sequence, then fail the WebSocket connection. + if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Server did not respond with sent protocols.') + return + } + + // 3. Follow the requirements stated step 2 to step 6, inclusive, + // of the last set of steps in section 4.1 of The WebSocket + // Protocol to validate response. This either results in fail + // the WebSocket connection or the WebSocket connection is + // established. + + // 2. If the response lacks an |Upgrade| header field or the |Upgrade| + // header field contains a value that is not an ASCII case- + // insensitive match for the value "websocket", the client MUST + // _Fail the WebSocket Connection_. + if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') { + failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".') + return + } + + // 3. If the response lacks a |Connection| header field or the + // |Connection| header field doesn't contain a token that is an + // ASCII case-insensitive match for the value "Upgrade", the client + // MUST _Fail the WebSocket Connection_. + if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') { + failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".') + return + } + + // 4. If the response lacks a |Sec-WebSocket-Accept| header field or + // the |Sec-WebSocket-Accept| contains a value other than the + // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket- + // Key| (as a string, not base64-decoded) with the string "258EAFA5- + // E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and + // trailing whitespace, the client MUST _Fail the WebSocket + // Connection_. + const secWSAccept = response.headersList.get('Sec-WebSocket-Accept') + const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64') + if (secWSAccept !== digest) { + failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.') + return + } + + // 5. 
If the response includes a |Sec-WebSocket-Extensions| header + // field and this header field indicates the use of an extension + // that was not present in the client's handshake (the server has + // indicated an extension not requested by the client), the client + // MUST _Fail the WebSocket Connection_. (The parsing of this + // header field to determine which extensions are requested is + // discussed in Section 9.1.) + const secExtension = response.headersList.get('Sec-WebSocket-Extensions') + + if (secExtension !== null && secExtension !== permessageDeflate) { + failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.') + return + } + + // 6. If the response includes a |Sec-WebSocket-Protocol| header field + // and this header field indicates the use of a subprotocol that was + // not present in the client's handshake (the server has indicated a + // subprotocol not requested by the client), the client MUST _Fail + // the WebSocket Connection_. + const secProtocol = response.headersList.get('Sec-WebSocket-Protocol') + + if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) { + failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.') + return + } + + response.socket.on('data', onSocketData) + response.socket.on('close', onSocketClose) + response.socket.on('error', onSocketError) + + if (channels.open.hasSubscribers) { + channels.open.publish({ + address: response.socket.address(), + protocol: secProtocol, + extensions: secExtension + }) + } + + onEstablish(response) + } + }) + + return controller +} + +/** + * @param {Buffer} chunk + */ +function onSocketData (chunk) { + if (!this.ws[kByteParser].write(chunk)) { + this.pause() + } +} + +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4 + */ +function onSocketClose () { + const { ws } = this + + // If the TCP connection was closed after the + // WebSocket closing handshake was completed, the WebSocket connection + // is said to have been closed _cleanly_. + const wasClean = ws[kSentClose] && ws[kReceivedClose] + + let code = 1005 + let reason = '' + + const result = ws[kByteParser].closingInfo + + if (result) { + code = result.code ?? 1005 + reason = result.reason + } else if (!ws[kSentClose]) { + // If _The WebSocket + // Connection is Closed_ and no Close control frame was received by the + // endpoint (such as could occur if the underlying transport connection + // is lost), _The WebSocket Connection Close Code_ is considered to be + // 1006. + code = 1006 + } + + // 1. Change the ready state to CLOSED (3). + ws[kReadyState] = states.CLOSED + + // 2. If the user agent was required to fail the WebSocket + // connection, or if the WebSocket connection was closed + // after being flagged as full, fire an event named error + // at the WebSocket object. + // TODO + + // 3. Fire an event named close at the WebSocket object, + // using CloseEvent, with the wasClean attribute + // initialized to true if the connection closed cleanly + // and false otherwise, the code attribute initialized to + // the WebSocket connection close code, and the reason + // attribute initialized to the result of applying UTF-8 + // decode without BOM to the WebSocket connection close + // reason. 
+ fireEvent('close', ws, CloseEvent, { + wasClean, code, reason + }) + + if (channels.close.hasSubscribers) { + channels.close.publish({ + websocket: ws, + code, + reason + }) + } +} + +function onSocketError (error) { + const { ws } = this + + ws[kReadyState] = states.CLOSING + + if (channels.socketError.hasSubscribers) { + channels.socketError.publish(error) + } + + this.destroy() +} + +module.exports = { + establishWebSocketConnection +} + + +/***/ }), + +/***/ 5913: +/***/ ((module) => { + +"use strict"; + + +// This is a Globally Unique Identifier unique used +// to validate that the endpoint accepts websocket +// connections. +// See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3 +const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' + +/** @type {PropertyDescriptor} */ +const staticPropertyDescriptors = { + enumerable: true, + writable: false, + configurable: false +} + +const states = { + CONNECTING: 0, + OPEN: 1, + CLOSING: 2, + CLOSED: 3 +} + +const opcodes = { + CONTINUATION: 0x0, + TEXT: 0x1, + BINARY: 0x2, + CLOSE: 0x8, + PING: 0x9, + PONG: 0xA +} + +const maxUnsigned16Bit = 2 ** 16 - 1 // 65535 + +const parserStates = { + INFO: 0, + PAYLOADLENGTH_16: 2, + PAYLOADLENGTH_64: 3, + READ_DATA: 4 +} + +const emptyBuffer = Buffer.allocUnsafe(0) + +module.exports = { + uid, + staticPropertyDescriptors, + states, + opcodes, + maxUnsigned16Bit, + parserStates, + emptyBuffer +} + + +/***/ }), + +/***/ 6255: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { webidl } = __nccwpck_require__(4222) +const { kEnumerableProperty } = __nccwpck_require__(3440) +const { MessagePort } = __nccwpck_require__(8167) + +/** + * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent + */ +class MessageEvent extends Event { + #eventInit + + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' }) + + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.MessageEventInit(eventInitDict) + + super(type, eventInitDict) + + this.#eventInit = eventInitDict + } + + get data () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.data + } + + get origin () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.origin + } + + get lastEventId () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.lastEventId + } + + get source () { + webidl.brandCheck(this, MessageEvent) + + return this.#eventInit.source + } + + get ports () { + webidl.brandCheck(this, MessageEvent) + + if (!Object.isFrozen(this.#eventInit.ports)) { + Object.freeze(this.#eventInit.ports) + } + + return this.#eventInit.ports + } + + initMessageEvent ( + type, + bubbles = false, + cancelable = false, + data = null, + origin = '', + lastEventId = '', + source = null, + ports = [] + ) { + webidl.brandCheck(this, MessageEvent) + + webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' }) + + return new MessageEvent(type, { + bubbles, cancelable, data, origin, lastEventId, source, ports + }) + } +} + +/** + * @see https://websockets.spec.whatwg.org/#the-closeevent-interface + */ +class CloseEvent extends Event { + #eventInit + + constructor (type, eventInitDict = {}) { + webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' }) + + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.CloseEventInit(eventInitDict) + + super(type, eventInitDict) + + this.#eventInit = eventInitDict + 
} + + get wasClean () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.wasClean + } + + get code () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.code + } + + get reason () { + webidl.brandCheck(this, CloseEvent) + + return this.#eventInit.reason + } +} + +// https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface +class ErrorEvent extends Event { + #eventInit + + constructor (type, eventInitDict) { + webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' }) + + super(type, eventInitDict) + + type = webidl.converters.DOMString(type) + eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {}) + + this.#eventInit = eventInitDict + } + + get message () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.message + } + + get filename () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.filename + } + + get lineno () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.lineno + } + + get colno () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.colno + } + + get error () { + webidl.brandCheck(this, ErrorEvent) + + return this.#eventInit.error + } +} + +Object.defineProperties(MessageEvent.prototype, { + [Symbol.toStringTag]: { + value: 'MessageEvent', + configurable: true + }, + data: kEnumerableProperty, + origin: kEnumerableProperty, + lastEventId: kEnumerableProperty, + source: kEnumerableProperty, + ports: kEnumerableProperty, + initMessageEvent: kEnumerableProperty +}) + +Object.defineProperties(CloseEvent.prototype, { + [Symbol.toStringTag]: { + value: 'CloseEvent', + configurable: true + }, + reason: kEnumerableProperty, + code: kEnumerableProperty, + wasClean: kEnumerableProperty +}) + +Object.defineProperties(ErrorEvent.prototype, { + [Symbol.toStringTag]: { + value: 'ErrorEvent', + configurable: true + }, + message: kEnumerableProperty, + filename: kEnumerableProperty, + lineno: kEnumerableProperty, + colno: kEnumerableProperty, + error: kEnumerableProperty +}) + +webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort) + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.MessagePort +) + +const eventInit = [ + { + key: 'bubbles', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'cancelable', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'composed', + converter: webidl.converters.boolean, + defaultValue: false + } +] + +webidl.converters.MessageEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'data', + converter: webidl.converters.any, + defaultValue: null + }, + { + key: 'origin', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lastEventId', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'source', + // Node doesn't implement WindowProxy or ServiceWorker, so the only + // valid value for source is a MessagePort. 
+ converter: webidl.nullableConverter(webidl.converters.MessagePort), + defaultValue: null + }, + { + key: 'ports', + converter: webidl.converters['sequence'], + get defaultValue () { + return [] + } + } +]) + +webidl.converters.CloseEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'wasClean', + converter: webidl.converters.boolean, + defaultValue: false + }, + { + key: 'code', + converter: webidl.converters['unsigned short'], + defaultValue: 0 + }, + { + key: 'reason', + converter: webidl.converters.USVString, + defaultValue: '' + } +]) + +webidl.converters.ErrorEventInit = webidl.dictionaryConverter([ + ...eventInit, + { + key: 'message', + converter: webidl.converters.DOMString, + defaultValue: '' + }, + { + key: 'filename', + converter: webidl.converters.USVString, + defaultValue: '' + }, + { + key: 'lineno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'colno', + converter: webidl.converters['unsigned long'], + defaultValue: 0 + }, + { + key: 'error', + converter: webidl.converters.any + } +]) + +module.exports = { + MessageEvent, + CloseEvent, + ErrorEvent +} + + +/***/ }), + +/***/ 1237: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { maxUnsigned16Bit } = __nccwpck_require__(5913) + +/** @type {import('crypto')} */ +let crypto +try { + crypto = __nccwpck_require__(6982) +} catch { + +} + +class WebsocketFrameSend { + /** + * @param {Buffer|undefined} data + */ + constructor (data) { + this.frameData = data + this.maskKey = crypto.randomBytes(4) + } + + createFrame (opcode) { + const bodyLength = this.frameData?.byteLength ?? 0 + + /** @type {number} */ + let payloadLength = bodyLength // 0-125 + let offset = 6 + + if (bodyLength > maxUnsigned16Bit) { + offset += 8 // payload length is next 8 bytes + payloadLength = 127 + } else if (bodyLength > 125) { + offset += 2 // payload length is next 2 bytes + payloadLength = 126 + } + + const buffer = Buffer.allocUnsafe(bodyLength + offset) + + // Clear first 2 bytes, everything else is overwritten + buffer[0] = buffer[1] = 0 + buffer[0] |= 0x80 // FIN + buffer[0] = (buffer[0] & 0xF0) + opcode // opcode + + /*! ws. MIT License. Einar Otto Stangvik */ + buffer[offset - 4] = this.maskKey[0] + buffer[offset - 3] = this.maskKey[1] + buffer[offset - 2] = this.maskKey[2] + buffer[offset - 1] = this.maskKey[3] + + buffer[1] = payloadLength + + if (payloadLength === 126) { + buffer.writeUInt16BE(bodyLength, 2) + } else if (payloadLength === 127) { + // Clear extended payload length + buffer[2] = buffer[3] = 0 + buffer.writeUIntBE(bodyLength, 4, 6) + } + + buffer[1] |= 0x80 // MASK + + // mask body + for (let i = 0; i < bodyLength; i++) { + buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4] + } + + return buffer + } +} + +module.exports = { + WebsocketFrameSend +} + + +/***/ }), + +/***/ 3171: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { Writable } = __nccwpck_require__(2203) +const diagnosticsChannel = __nccwpck_require__(1637) +const { parserStates, opcodes, states, emptyBuffer } = __nccwpck_require__(5913) +const { kReadyState, kSentClose, kResponse, kReceivedClose } = __nccwpck_require__(2933) +const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = __nccwpck_require__(3574) +const { WebsocketFrameSend } = __nccwpck_require__(1237) + +// This code was influenced by ws released under the MIT license. 
+// Copyright (c) 2011 Einar Otto Stangvik +// Copyright (c) 2013 Arnout Kazemier and contributors +// Copyright (c) 2016 Luigi Pinca and contributors + +const channels = {} +channels.ping = diagnosticsChannel.channel('undici:websocket:ping') +channels.pong = diagnosticsChannel.channel('undici:websocket:pong') + +class ByteParser extends Writable { + #buffers = [] + #byteOffset = 0 + + #state = parserStates.INFO + + #info = {} + #fragments = [] + + constructor (ws) { + super() + + this.ws = ws + } + + /** + * @param {Buffer} chunk + * @param {() => void} callback + */ + _write (chunk, _, callback) { + this.#buffers.push(chunk) + this.#byteOffset += chunk.length + + this.run(callback) + } + + /** + * Runs whenever a new chunk is received. + * Callback is called whenever there are no more chunks buffering, + * or not enough bytes are buffered to parse. + */ + run (callback) { + while (true) { + if (this.#state === parserStates.INFO) { + // If there aren't enough bytes to parse the payload length, etc. + if (this.#byteOffset < 2) { + return callback() + } + + const buffer = this.consume(2) + + this.#info.fin = (buffer[0] & 0x80) !== 0 + this.#info.opcode = buffer[0] & 0x0F + + // If we receive a fragmented message, we use the type of the first + // frame to parse the full message as binary/text, when it's terminated + this.#info.originalOpcode ??= this.#info.opcode + + this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION + + if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) { + // Only text and binary frames can be fragmented + failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.') + return + } + + const payloadLength = buffer[1] & 0x7F + + if (payloadLength <= 125) { + this.#info.payloadLength = payloadLength + this.#state = parserStates.READ_DATA + } else if (payloadLength === 126) { + this.#state = parserStates.PAYLOADLENGTH_16 + } else if (payloadLength === 127) { + this.#state = parserStates.PAYLOADLENGTH_64 + } + + if (this.#info.fragmented && payloadLength > 125) { + // A fragmented frame can't be fragmented itself + failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.') + return + } else if ( + (this.#info.opcode === opcodes.PING || + this.#info.opcode === opcodes.PONG || + this.#info.opcode === opcodes.CLOSE) && + payloadLength > 125 + ) { + // Control frames can have a payload length of 125 bytes MAX + failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.') + return + } else if (this.#info.opcode === opcodes.CLOSE) { + if (payloadLength === 1) { + failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.') + return + } + + const body = this.consume(payloadLength) + + this.#info.closeInfo = this.parseCloseBody(false, body) + + if (!this.ws[kSentClose]) { + // If an endpoint receives a Close frame and did not previously send a + // Close frame, the endpoint MUST send a Close frame in response. (When + // sending a Close frame in response, the endpoint typically echos the + // status code it received.) 
+ const body = Buffer.allocUnsafe(2) + body.writeUInt16BE(this.#info.closeInfo.code, 0) + const closeFrame = new WebsocketFrameSend(body) + + this.ws[kResponse].socket.write( + closeFrame.createFrame(opcodes.CLOSE), + (err) => { + if (!err) { + this.ws[kSentClose] = true + } + } + ) + } + + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + this.ws[kReadyState] = states.CLOSING + this.ws[kReceivedClose] = true + + this.end() + + return + } else if (this.#info.opcode === opcodes.PING) { + // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in + // response, unless it already received a Close frame. + // A Pong frame sent in response to a Ping frame must have identical + // "Application data" + + const body = this.consume(payloadLength) + + if (!this.ws[kReceivedClose]) { + const frame = new WebsocketFrameSend(body) + + this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG)) + + if (channels.ping.hasSubscribers) { + channels.ping.publish({ + payload: body + }) + } + } + + this.#state = parserStates.INFO + + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } else if (this.#info.opcode === opcodes.PONG) { + // A Pong frame MAY be sent unsolicited. This serves as a + // unidirectional heartbeat. A response to an unsolicited Pong frame is + // not expected. + + const body = this.consume(payloadLength) + + if (channels.pong.hasSubscribers) { + channels.pong.publish({ + payload: body + }) + } + + if (this.#byteOffset > 0) { + continue + } else { + callback() + return + } + } + } else if (this.#state === parserStates.PAYLOADLENGTH_16) { + if (this.#byteOffset < 2) { + return callback() + } + + const buffer = this.consume(2) + + this.#info.payloadLength = buffer.readUInt16BE(0) + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.PAYLOADLENGTH_64) { + if (this.#byteOffset < 8) { + return callback() + } + + const buffer = this.consume(8) + const upper = buffer.readUInt32BE(0) + + // 2^31 is the maxinimum bytes an arraybuffer can contain + // on 32-bit systems. Although, on 64-bit systems, this is + // 2^53-1 bytes. 
+ // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275 + // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e + if (upper > 2 ** 31 - 1) { + failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.') + return + } + + const lower = buffer.readUInt32BE(4) + + this.#info.payloadLength = (upper << 8) + lower + this.#state = parserStates.READ_DATA + } else if (this.#state === parserStates.READ_DATA) { + if (this.#byteOffset < this.#info.payloadLength) { + // If there is still more data in this chunk that needs to be read + return callback() + } else if (this.#byteOffset >= this.#info.payloadLength) { + // If the server sent multiple frames in a single chunk + + const body = this.consume(this.#info.payloadLength) + + this.#fragments.push(body) + + // If the frame is unfragmented, or a fragmented frame was terminated, + // a message was received + if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) { + const fullMessage = Buffer.concat(this.#fragments) + + websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage) + + this.#info = {} + this.#fragments.length = 0 + } + + this.#state = parserStates.INFO + } + } + + if (this.#byteOffset > 0) { + continue + } else { + callback() + break + } + } + } + + /** + * Take n bytes from the buffered Buffers + * @param {number} n + * @returns {Buffer|null} + */ + consume (n) { + if (n > this.#byteOffset) { + return null + } else if (n === 0) { + return emptyBuffer + } + + if (this.#buffers[0].length === n) { + this.#byteOffset -= this.#buffers[0].length + return this.#buffers.shift() + } + + const buffer = Buffer.allocUnsafe(n) + let offset = 0 + + while (offset !== n) { + const next = this.#buffers[0] + const { length } = next + + if (length + offset === n) { + buffer.set(this.#buffers.shift(), offset) + break + } else if (length + offset > n) { + buffer.set(next.subarray(0, n - offset), offset) + this.#buffers[0] = next.subarray(n - offset) + break + } else { + buffer.set(this.#buffers.shift(), offset) + offset += next.length + } + } + + this.#byteOffset -= n + + return buffer + } + + parseCloseBody (onlyCode, data) { + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5 + /** @type {number|undefined} */ + let code + + if (data.length >= 2) { + // _The WebSocket Connection Close Code_ is + // defined as the status code (Section 7.4) contained in the first Close + // control frame received by the application + code = data.readUInt16BE(0) + } + + if (onlyCode) { + if (!isValidStatusCode(code)) { + return null + } + + return { code } + } + + // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6 + /** @type {Buffer} */ + let reason = data.subarray(2) + + // Remove BOM + if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) { + reason = reason.subarray(3) + } + + if (code !== undefined && !isValidStatusCode(code)) { + return null + } + + try { + // TODO: optimize this + reason = new TextDecoder('utf-8', { fatal: true }).decode(reason) + } catch { + return null + } + + return { code, reason } + } + + get closingInfo () { + return this.#info.closeInfo + } +} + +module.exports = { + ByteParser +} + + +/***/ }), + +/***/ 2933: +/***/ ((module) => { + +"use strict"; + + +module.exports 
= { + kWebSocketURL: Symbol('url'), + kReadyState: Symbol('ready state'), + kController: Symbol('controller'), + kResponse: Symbol('response'), + kBinaryType: Symbol('binary type'), + kSentClose: Symbol('sent close'), + kReceivedClose: Symbol('received close'), + kByteParser: Symbol('byte parser') +} + + +/***/ }), + +/***/ 3574: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = __nccwpck_require__(2933) +const { states, opcodes } = __nccwpck_require__(5913) +const { MessageEvent, ErrorEvent } = __nccwpck_require__(6255) + +/* globals Blob */ + +/** + * @param {import('./websocket').WebSocket} ws + */ +function isEstablished (ws) { + // If the server's response is validated as provided for above, it is + // said that _The WebSocket Connection is Established_ and that the + // WebSocket Connection is in the OPEN state. + return ws[kReadyState] === states.OPEN +} + +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosing (ws) { + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + return ws[kReadyState] === states.CLOSING +} + +/** + * @param {import('./websocket').WebSocket} ws + */ +function isClosed (ws) { + return ws[kReadyState] === states.CLOSED +} + +/** + * @see https://dom.spec.whatwg.org/#concept-event-fire + * @param {string} e + * @param {EventTarget} target + * @param {EventInit | undefined} eventInitDict + */ +function fireEvent (e, target, eventConstructor = Event, eventInitDict) { + // 1. If eventConstructor is not given, then let eventConstructor be Event. + + // 2. Let event be the result of creating an event given eventConstructor, + // in the relevant realm of target. + // 3. Initialize event’s type attribute to e. + const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap + + // 4. Initialize any other IDL attributes of event as described in the + // invocation of this algorithm. + + // 5. Return the result of dispatching event at target, with legacy target + // override flag set if set. + target.dispatchEvent(event) +} + +/** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + * @param {import('./websocket').WebSocket} ws + * @param {number} type Opcode + * @param {Buffer} data application data + */ +function websocketMessageReceived (ws, type, data) { + // 1. If ready state is not OPEN (1), then return. + if (ws[kReadyState] !== states.OPEN) { + return + } + + // 2. 
Let dataForEvent be determined by switching on type and binary type: + let dataForEvent + + if (type === opcodes.TEXT) { + // -> type indicates that the data is Text + // a new DOMString containing data + try { + dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data) + } catch { + failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.') + return + } + } else if (type === opcodes.BINARY) { + if (ws[kBinaryType] === 'blob') { + // -> type indicates that the data is Binary and binary type is "blob" + // a new Blob object, created in the relevant Realm of the WebSocket + // object, that represents data as its raw data + dataForEvent = new Blob([data]) + } else { + // -> type indicates that the data is Binary and binary type is "arraybuffer" + // a new ArrayBuffer object, created in the relevant Realm of the + // WebSocket object, whose contents are data + dataForEvent = new Uint8Array(data).buffer + } + } + + // 3. Fire an event named message at the WebSocket object, using MessageEvent, + // with the origin attribute initialized to the serialization of the WebSocket + // object’s url's origin, and the data attribute initialized to dataForEvent. + fireEvent('message', ws, MessageEvent, { + origin: ws[kWebSocketURL].origin, + data: dataForEvent + }) +} + +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455 + * @see https://datatracker.ietf.org/doc/html/rfc2616 + * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407 + * @param {string} protocol + */ +function isValidSubprotocol (protocol) { + // If present, this value indicates one + // or more comma-separated subprotocol the client wishes to speak, + // ordered by preference. The elements that comprise this value + // MUST be non-empty strings with characters in the range U+0021 to + // U+007E not including separator characters as defined in + // [RFC2616] and MUST all be unique strings. + if (protocol.length === 0) { + return false + } + + for (const char of protocol) { + const code = char.charCodeAt(0) + + if ( + code < 0x21 || + code > 0x7E || + char === '(' || + char === ')' || + char === '<' || + char === '>' || + char === '@' || + char === ',' || + char === ';' || + char === ':' || + char === '\\' || + char === '"' || + char === '/' || + char === '[' || + char === ']' || + char === '?' 
|| + char === '=' || + char === '{' || + char === '}' || + code === 32 || // SP + code === 9 // HT + ) { + return false + } + } + + return true +} + +/** + * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4 + * @param {number} code + */ +function isValidStatusCode (code) { + if (code >= 1000 && code < 1015) { + return ( + code !== 1004 && // reserved + code !== 1005 && // "MUST NOT be set as a status code" + code !== 1006 // "MUST NOT be set as a status code" + ) + } + + return code >= 3000 && code <= 4999 +} + +/** + * @param {import('./websocket').WebSocket} ws + * @param {string|undefined} reason + */ +function failWebsocketConnection (ws, reason) { + const { [kController]: controller, [kResponse]: response } = ws + + controller.abort() + + if (response?.socket && !response.socket.destroyed) { + response.socket.destroy() + } + + if (reason) { + fireEvent('error', ws, ErrorEvent, { + error: new Error(reason) + }) + } +} + +module.exports = { + isEstablished, + isClosing, + isClosed, + fireEvent, + isValidSubprotocol, + isValidStatusCode, + failWebsocketConnection, + websocketMessageReceived +} + + +/***/ }), + +/***/ 5171: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const { webidl } = __nccwpck_require__(4222) +const { DOMException } = __nccwpck_require__(7326) +const { URLSerializer } = __nccwpck_require__(4322) +const { getGlobalOrigin } = __nccwpck_require__(5628) +const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = __nccwpck_require__(5913) +const { + kWebSocketURL, + kReadyState, + kController, + kBinaryType, + kResponse, + kSentClose, + kByteParser +} = __nccwpck_require__(2933) +const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = __nccwpck_require__(3574) +const { establishWebSocketConnection } = __nccwpck_require__(8550) +const { WebsocketFrameSend } = __nccwpck_require__(1237) +const { ByteParser } = __nccwpck_require__(3171) +const { kEnumerableProperty, isBlobLike } = __nccwpck_require__(3440) +const { getGlobalDispatcher } = __nccwpck_require__(2581) +const { types } = __nccwpck_require__(9023) + +let experimentalWarned = false + +// https://websockets.spec.whatwg.org/#interface-definition +class WebSocket extends EventTarget { + #events = { + open: null, + error: null, + close: null, + message: null + } + + #bufferedAmount = 0 + #protocol = '' + #extensions = '' + + /** + * @param {string} url + * @param {string|string[]} protocols + */ + constructor (url, protocols = []) { + super() + + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' }) + + if (!experimentalWarned) { + experimentalWarned = true + process.emitWarning('WebSockets are experimental, expect them to change at any time.', { + code: 'UNDICI-WS' + }) + } + + const options = webidl.converters['DOMString or sequence or WebSocketInit'](protocols) + + url = webidl.converters.USVString(url) + protocols = options.protocols + + // 1. Let baseURL be this's relevant settings object's API base URL. + const baseURL = getGlobalOrigin() + + // 1. Let urlRecord be the result of applying the URL parser to url with baseURL. + let urlRecord + + try { + urlRecord = new URL(url, baseURL) + } catch (e) { + // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException. + throw new DOMException(e, 'SyntaxError') + } + + // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws". 
+ if (urlRecord.protocol === 'http:') { + urlRecord.protocol = 'ws:' + } else if (urlRecord.protocol === 'https:') { + // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss". + urlRecord.protocol = 'wss:' + } + + // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException. + if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') { + throw new DOMException( + `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`, + 'SyntaxError' + ) + } + + // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError" + // DOMException. + if (urlRecord.hash || urlRecord.href.endsWith('#')) { + throw new DOMException('Got fragment', 'SyntaxError') + } + + // 8. If protocols is a string, set protocols to a sequence consisting + // of just that string. + if (typeof protocols === 'string') { + protocols = [protocols] + } + + // 9. If any of the values in protocols occur more than once or otherwise + // fail to match the requirements for elements that comprise the value + // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket + // protocol, then throw a "SyntaxError" DOMException. + if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } + + if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) { + throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') + } + + // 10. Set this's url to urlRecord. + this[kWebSocketURL] = new URL(urlRecord.href) + + // 11. Let client be this's relevant settings object. + + // 12. Run this step in parallel: + + // 1. Establish a WebSocket connection given urlRecord, protocols, + // and client. + this[kController] = establishWebSocketConnection( + urlRecord, + protocols, + this, + (response) => this.#onConnectionEstablished(response), + options + ) + + // Each WebSocket object has an associated ready state, which is a + // number representing the state of the connection. Initially it must + // be CONNECTING (0). + this[kReadyState] = WebSocket.CONNECTING + + // The extensions attribute must initially return the empty string. + + // The protocol attribute must initially return the empty string. + + // Each WebSocket object has an associated binary type, which is a + // BinaryType. Initially it must be "blob". + this[kBinaryType] = 'blob' + } + + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-close + * @param {number|undefined} code + * @param {string|undefined} reason + */ + close (code = undefined, reason = undefined) { + webidl.brandCheck(this, WebSocket) + + if (code !== undefined) { + code = webidl.converters['unsigned short'](code, { clamp: true }) + } + + if (reason !== undefined) { + reason = webidl.converters.USVString(reason) + } + + // 1. If code is present, but is neither an integer equal to 1000 nor an + // integer in the range 3000 to 4999, inclusive, throw an + // "InvalidAccessError" DOMException. + if (code !== undefined) { + if (code !== 1000 && (code < 3000 || code > 4999)) { + throw new DOMException('invalid code', 'InvalidAccessError') + } + } + + let reasonByteLength = 0 + + // 2. If reason is present, then run these substeps: + if (reason !== undefined) { + // 1. Let reasonBytes be the result of encoding reason. + // 2. If reasonBytes is longer than 123 bytes, then throw a + // "SyntaxError" DOMException. 
+ reasonByteLength = Buffer.byteLength(reason) + + if (reasonByteLength > 123) { + throw new DOMException( + `Reason must be less than 123 bytes; received ${reasonByteLength}`, + 'SyntaxError' + ) + } + } + + // 3. Run the first matching steps from the following list: + if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) { + // If this's ready state is CLOSING (2) or CLOSED (3) + // Do nothing. + } else if (!isEstablished(this)) { + // If the WebSocket connection is not yet established + // Fail the WebSocket connection and set this's ready state + // to CLOSING (2). + failWebsocketConnection(this, 'Connection was closed before it was established.') + this[kReadyState] = WebSocket.CLOSING + } else if (!isClosing(this)) { + // If the WebSocket closing handshake has not yet been started + // Start the WebSocket closing handshake and set this's ready + // state to CLOSING (2). + // - If neither code nor reason is present, the WebSocket Close + // message must not have a body. + // - If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + // - If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + + const frame = new WebsocketFrameSend() + + // If neither code nor reason is present, the WebSocket Close + // message must not have a body. + + // If code is present, then the status code to use in the + // WebSocket Close message must be the integer given by code. + if (code !== undefined && reason === undefined) { + frame.frameData = Buffer.allocUnsafe(2) + frame.frameData.writeUInt16BE(code, 0) + } else if (code !== undefined && reason !== undefined) { + // If reason is also present, then reasonBytes must be + // provided in the Close message after the status code. + frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength) + frame.frameData.writeUInt16BE(code, 0) + // the body MAY contain UTF-8-encoded data with value /reason/ + frame.frameData.write(reason, 2, 'utf-8') + } else { + frame.frameData = emptyBuffer + } + + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket + + socket.write(frame.createFrame(opcodes.CLOSE), (err) => { + if (!err) { + this[kSentClose] = true + } + }) + + // Upon either sending or receiving a Close control frame, it is said + // that _The WebSocket Closing Handshake is Started_ and that the + // WebSocket connection is in the CLOSING state. + this[kReadyState] = states.CLOSING + } else { + // Otherwise + // Set this's ready state to CLOSING (2). + this[kReadyState] = WebSocket.CLOSING + } + } + + /** + * @see https://websockets.spec.whatwg.org/#dom-websocket-send + * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data + */ + send (data) { + webidl.brandCheck(this, WebSocket) + + webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' }) + + data = webidl.converters.WebSocketSendData(data) + + // 1. If this's ready state is CONNECTING, then throw an + // "InvalidStateError" DOMException. + if (this[kReadyState] === WebSocket.CONNECTING) { + throw new DOMException('Sent before connected.', 'InvalidStateError') + } + + // 2. 
Run the appropriate set of steps from the following list: + // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1 + // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 + + if (!isEstablished(this) || isClosing(this)) { + return + } + + /** @type {import('stream').Duplex} */ + const socket = this[kResponse].socket + + // If data is a string + if (typeof data === 'string') { + // If the WebSocket connection is established and the WebSocket + // closing handshake has not yet started, then the user agent + // must send a WebSocket Message comprised of the data argument + // using a text frame opcode; if the data cannot be sent, e.g. + // because it would need to be buffered but the buffer is full, + // the user agent must flag the WebSocket as full and then close + // the WebSocket connection. Any invocation of this method with a + // string argument that does not throw an exception must increase + // the bufferedAmount attribute by the number of bytes needed to + // express the argument as UTF-8. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.TEXT) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (types.isArrayBuffer(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need + // to be buffered but the buffer is full, the user agent must flag + // the WebSocket as full and then close the WebSocket connection. + // The data to be sent is the data stored in the buffer described + // by the ArrayBuffer object. Any invocation of this method with an + // ArrayBuffer argument that does not throw an exception must + // increase the bufferedAmount attribute by the length of the + // ArrayBuffer in bytes. + + const value = Buffer.from(data) + const frame = new WebsocketFrameSend(value) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + } else if (ArrayBuffer.isView(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The + // data to be sent is the data stored in the section of the buffer + // described by the ArrayBuffer object that data references. Any + // invocation of this method with this kind of argument that does + // not throw an exception must increase the bufferedAmount attribute + // by the length of data’s buffer in bytes. 
+ + const ab = Buffer.from(data, data.byteOffset, data.byteLength) + + const frame = new WebsocketFrameSend(ab) + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += ab.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= ab.byteLength + }) + } else if (isBlobLike(data)) { + // If the WebSocket connection is established, and the WebSocket + // closing handshake has not yet started, then the user agent must + // send a WebSocket Message comprised of data using a binary frame + // opcode; if the data cannot be sent, e.g. because it would need to + // be buffered but the buffer is full, the user agent must flag the + // WebSocket as full and then close the WebSocket connection. The data + // to be sent is the raw data represented by the Blob object. Any + // invocation of this method with a Blob argument that does not throw + // an exception must increase the bufferedAmount attribute by the size + // of the Blob object’s raw data, in bytes. + + const frame = new WebsocketFrameSend() + + data.arrayBuffer().then((ab) => { + const value = Buffer.from(ab) + frame.frameData = value + const buffer = frame.createFrame(opcodes.BINARY) + + this.#bufferedAmount += value.byteLength + socket.write(buffer, () => { + this.#bufferedAmount -= value.byteLength + }) + }) + } + } + + get readyState () { + webidl.brandCheck(this, WebSocket) + + // The readyState getter steps are to return this's ready state. + return this[kReadyState] + } + + get bufferedAmount () { + webidl.brandCheck(this, WebSocket) + + return this.#bufferedAmount + } + + get url () { + webidl.brandCheck(this, WebSocket) + + // The url getter steps are to return this's url, serialized. + return URLSerializer(this[kWebSocketURL]) + } + + get extensions () { + webidl.brandCheck(this, WebSocket) + + return this.#extensions + } + + get protocol () { + webidl.brandCheck(this, WebSocket) + + return this.#protocol + } + + get onopen () { + webidl.brandCheck(this, WebSocket) + + return this.#events.open + } + + set onopen (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.open) { + this.removeEventListener('open', this.#events.open) + } + + if (typeof fn === 'function') { + this.#events.open = fn + this.addEventListener('open', fn) + } else { + this.#events.open = null + } + } + + get onerror () { + webidl.brandCheck(this, WebSocket) + + return this.#events.error + } + + set onerror (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.error) { + this.removeEventListener('error', this.#events.error) + } + + if (typeof fn === 'function') { + this.#events.error = fn + this.addEventListener('error', fn) + } else { + this.#events.error = null + } + } + + get onclose () { + webidl.brandCheck(this, WebSocket) + + return this.#events.close + } + + set onclose (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.close) { + this.removeEventListener('close', this.#events.close) + } + + if (typeof fn === 'function') { + this.#events.close = fn + this.addEventListener('close', fn) + } else { + this.#events.close = null + } + } + + get onmessage () { + webidl.brandCheck(this, WebSocket) + + return this.#events.message + } + + set onmessage (fn) { + webidl.brandCheck(this, WebSocket) + + if (this.#events.message) { + this.removeEventListener('message', this.#events.message) + } + + if (typeof fn === 'function') { + this.#events.message = fn + this.addEventListener('message', fn) + } else { + this.#events.message = null + } + } + + get binaryType () { + webidl.brandCheck(this, WebSocket) + 
+ return this[kBinaryType] + } + + set binaryType (type) { + webidl.brandCheck(this, WebSocket) + + if (type !== 'blob' && type !== 'arraybuffer') { + this[kBinaryType] = 'blob' + } else { + this[kBinaryType] = type + } + } + + /** + * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol + */ + #onConnectionEstablished (response) { + // processResponse is called when the "response’s header list has been received and initialized." + // once this happens, the connection is open + this[kResponse] = response + + const parser = new ByteParser(this) + parser.on('drain', function onParserDrain () { + this.ws[kResponse].socket.resume() + }) + + response.socket.ws = this + this[kByteParser] = parser + + // 1. Change the ready state to OPEN (1). + this[kReadyState] = states.OPEN + + // 2. Change the extensions attribute’s value to the extensions in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1 + const extensions = response.headersList.get('sec-websocket-extensions') + + if (extensions !== null) { + this.#extensions = extensions + } + + // 3. Change the protocol attribute’s value to the subprotocol in use, if + // it is not the null value. + // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9 + const protocol = response.headersList.get('sec-websocket-protocol') + + if (protocol !== null) { + this.#protocol = protocol + } + + // 4. Fire an event named open at the WebSocket object. + fireEvent('open', this) + } +} + +// https://websockets.spec.whatwg.org/#dom-websocket-connecting +WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING +// https://websockets.spec.whatwg.org/#dom-websocket-open +WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN +// https://websockets.spec.whatwg.org/#dom-websocket-closing +WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING +// https://websockets.spec.whatwg.org/#dom-websocket-closed +WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED + +Object.defineProperties(WebSocket.prototype, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors, + url: kEnumerableProperty, + readyState: kEnumerableProperty, + bufferedAmount: kEnumerableProperty, + onopen: kEnumerableProperty, + onerror: kEnumerableProperty, + onclose: kEnumerableProperty, + close: kEnumerableProperty, + onmessage: kEnumerableProperty, + binaryType: kEnumerableProperty, + send: kEnumerableProperty, + extensions: kEnumerableProperty, + protocol: kEnumerableProperty, + [Symbol.toStringTag]: { + value: 'WebSocket', + writable: false, + enumerable: false, + configurable: true + } +}) + +Object.defineProperties(WebSocket, { + CONNECTING: staticPropertyDescriptors, + OPEN: staticPropertyDescriptors, + CLOSING: staticPropertyDescriptors, + CLOSED: staticPropertyDescriptors +}) + +webidl.converters['sequence'] = webidl.sequenceConverter( + webidl.converters.DOMString +) + +webidl.converters['DOMString or sequence'] = function (V) { + if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) { + return webidl.converters['sequence'](V) + } + + return webidl.converters.DOMString(V) +} + +// This implements the propsal made in https://github.com/whatwg/websockets/issues/42 +webidl.converters.WebSocketInit = webidl.dictionaryConverter([ + { + key: 'protocols', + converter: webidl.converters['DOMString or sequence'], + get defaultValue () { + return [] + } + }, + { + key: 'dispatcher', + converter: (V) 
=> V, + get defaultValue () { + return getGlobalDispatcher() + } + }, + { + key: 'headers', + converter: webidl.nullableConverter(webidl.converters.HeadersInit) + } +]) + +webidl.converters['DOMString or sequence or WebSocketInit'] = function (V) { + if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) { + return webidl.converters.WebSocketInit(V) + } + + return { protocols: webidl.converters['DOMString or sequence'](V) } +} + +webidl.converters.WebSocketSendData = function (V) { + if (webidl.util.Type(V) === 'Object') { + if (isBlobLike(V)) { + return webidl.converters.Blob(V, { strict: false }) + } + + if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { + return webidl.converters.BufferSource(V) + } + } + + return webidl.converters.USVString(V) +} + +module.exports = { + WebSocket +} + + +/***/ }), + +/***/ 3843: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function getUserAgent() { + if (typeof navigator === "object" && "userAgent" in navigator) { + return navigator.userAgent; + } + + if (typeof process === "object" && process.version !== undefined) { + return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; + } + + return ""; +} + +exports.getUserAgent = getUserAgent; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 2048: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +Object.defineProperty(exports, "NIL", ({ + enumerable: true, + get: function () { + return _nil.default; + } +})); +Object.defineProperty(exports, "parse", ({ + enumerable: true, + get: function () { + return _parse.default; + } +})); +Object.defineProperty(exports, "stringify", ({ + enumerable: true, + get: function () { + return _stringify.default; + } +})); +Object.defineProperty(exports, "v1", ({ + enumerable: true, + get: function () { + return _v.default; + } +})); +Object.defineProperty(exports, "v3", ({ + enumerable: true, + get: function () { + return _v2.default; + } +})); +Object.defineProperty(exports, "v4", ({ + enumerable: true, + get: function () { + return _v3.default; + } +})); +Object.defineProperty(exports, "v5", ({ + enumerable: true, + get: function () { + return _v4.default; + } +})); +Object.defineProperty(exports, "validate", ({ + enumerable: true, + get: function () { + return _validate.default; + } +})); +Object.defineProperty(exports, "version", ({ + enumerable: true, + get: function () { + return _version.default; + } +})); + +var _v = _interopRequireDefault(__nccwpck_require__(6415)); + +var _v2 = _interopRequireDefault(__nccwpck_require__(1697)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(4676)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(9771)); + +var _nil = _interopRequireDefault(__nccwpck_require__(7723)); + +var _version = _interopRequireDefault(__nccwpck_require__(5868)); + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(7597)); + +var _parse = _interopRequireDefault(__nccwpck_require__(7267)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +/***/ }), + +/***/ 216: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 4221: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +var _default = { + randomUUID: _crypto.default.randomUUID +}; +exports["default"] = _default; + +/***/ }), + +/***/ 7723: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 7267: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+ + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 7879: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 2973: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 507: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6982)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 7597: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +exports.unsafeStringify = unsafeStringify; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).slice(1)); +} + +function unsafeStringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; +} + +function stringify(arr, offset = 0) { + const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 6415: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(2973)); + +var _stringify = __nccwpck_require__(7597); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.unsafeStringify)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 1697: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2930)); + +var _md = _interopRequireDefault(__nccwpck_require__(216)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 2930: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports.URL = exports.DNS = void 0; +exports["default"] = v35; + +var _stringify = __nccwpck_require__(7597); + +var _parse = _interopRequireDefault(__nccwpck_require__(7267)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function v35(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + var _namespace; + + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. 
`bytes = + // hashfunc([...namespace, ... value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 4676: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _native = _interopRequireDefault(__nccwpck_require__(4221)); + +var _rng = _interopRequireDefault(__nccwpck_require__(2973)); + +var _stringify = __nccwpck_require__(7597); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + if (_native.default.randomUUID && !buf && !options) { + return _native.default.randomUUID(); + } + + options = options || {}; + + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return (0, _stringify.unsafeStringify)(rnds); +} + +var _default = v4; +exports["default"] = _default; + +/***/ }), + +/***/ 9771: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(2930)); + +var _sha = _interopRequireDefault(__nccwpck_require__(507)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; +exports["default"] = _default; + +/***/ }), + +/***/ 6200: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _regex = _interopRequireDefault(__nccwpck_require__(7879)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; +exports["default"] = _default; + +/***/ }), + +/***/ 5868: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6200)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } + +function version(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + return parseInt(uuid.slice(14, 15), 16); +} + +var _default = version; +exports["default"] = _default; + +/***/ }), + +/***/ 8264: +/***/ ((module) => { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + +/***/ }), + +/***/ 3176: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ZodError = exports.quotelessJson = exports.ZodIssueCode = void 0; +const util_1 = __nccwpck_require__(8771); +exports.ZodIssueCode = util_1.util.arrayToEnum([ + "invalid_type", + "invalid_literal", + "custom", + "invalid_union", + "invalid_union_discriminator", + "invalid_enum_value", + "unrecognized_keys", + "invalid_arguments", + "invalid_return_type", + "invalid_date", + "invalid_string", + "too_small", + "too_big", + "invalid_intersection_types", + "not_multiple_of", + "not_finite", +]); +const quotelessJson = (obj) => { + const json = JSON.stringify(obj, null, 2); + return json.replace(/"([^"]+)":/g, "$1:"); +}; +exports.quotelessJson = quotelessJson; +class ZodError extends Error { + get errors() { + return this.issues; + } + constructor(issues) { + super(); + this.issues = []; + this.addIssue = (sub) => { + this.issues = [...this.issues, sub]; + }; + this.addIssues = (subs = []) => { + this.issues = [...this.issues, ...subs]; + }; + const actualProto = new.target.prototype; + if (Object.setPrototypeOf) { + // eslint-disable-next-line ban/ban + Object.setPrototypeOf(this, actualProto); + } + else { + this.__proto__ = actualProto; + } + this.name = "ZodError"; + this.issues = issues; + } + format(_mapper) { + const mapper = _mapper || + function (issue) { + return issue.message; + }; + const fieldErrors = { _errors: [] }; + const processError = (error) => { + for (const issue of error.issues) { + if (issue.code === "invalid_union") { + issue.unionErrors.map(processError); + } + else if (issue.code === "invalid_return_type") { + processError(issue.returnTypeError); + } + else if (issue.code === "invalid_arguments") { + processError(issue.argumentsError); + } + else if (issue.path.length === 0) { + fieldErrors._errors.push(mapper(issue)); + } + else { + let curr = fieldErrors; + let i = 0; + while (i < issue.path.length) { + const el = issue.path[i]; + const terminal = i === issue.path.length - 1; + if (!terminal) { + curr[el] = curr[el] || { _errors: [] }; + // if (typeof el === "string") { + // curr[el] = curr[el] || { _errors: [] }; + // } else if (typeof el === "number") { + // const errorArray: any = []; + // errorArray._errors = []; + // 
curr[el] = curr[el] || errorArray; + // } + } + else { + curr[el] = curr[el] || { _errors: [] }; + curr[el]._errors.push(mapper(issue)); + } + curr = curr[el]; + i++; + } + } + } + }; + processError(this); + return fieldErrors; + } + static assert(value) { + if (!(value instanceof ZodError)) { + throw new Error(`Not a ZodError: ${value}`); + } + } + toString() { + return this.message; + } + get message() { + return JSON.stringify(this.issues, util_1.util.jsonStringifyReplacer, 2); + } + get isEmpty() { + return this.issues.length === 0; + } + flatten(mapper = (issue) => issue.message) { + const fieldErrors = {}; + const formErrors = []; + for (const sub of this.issues) { + if (sub.path.length > 0) { + fieldErrors[sub.path[0]] = fieldErrors[sub.path[0]] || []; + fieldErrors[sub.path[0]].push(mapper(sub)); + } + else { + formErrors.push(mapper(sub)); + } + } + return { formErrors, fieldErrors }; + } + get formErrors() { + return this.flatten(); + } +} +exports.ZodError = ZodError; +ZodError.create = (issues) => { + const error = new ZodError(issues); + return error; +}; + + +/***/ }), + +/***/ 5918: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getErrorMap = exports.setErrorMap = exports.defaultErrorMap = void 0; +const en_1 = __importDefault(__nccwpck_require__(9294)); +exports.defaultErrorMap = en_1.default; +let overrideErrorMap = en_1.default; +function setErrorMap(map) { + overrideErrorMap = map; +} +exports.setErrorMap = setErrorMap; +function getErrorMap() { + return overrideErrorMap; +} +exports.getErrorMap = getErrorMap; + + +/***/ }), + +/***/ 9746: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +__exportStar(__nccwpck_require__(5918), exports); +__exportStar(__nccwpck_require__(4640), exports); +__exportStar(__nccwpck_require__(5935), exports); +__exportStar(__nccwpck_require__(8771), exports); +__exportStar(__nccwpck_require__(5128), exports); +__exportStar(__nccwpck_require__(3176), exports); + + +/***/ }), + +/***/ 189: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.errorUtil = void 0; +var errorUtil; +(function (errorUtil) { + errorUtil.errToObj = (message) => typeof message === "string" ? { message } : message || {}; + errorUtil.toString = (message) => typeof message === "string" ? message : message === null || message === void 0 ? 
void 0 : message.message; +})(errorUtil || (exports.errorUtil = errorUtil = {})); + + +/***/ }), + +/***/ 4640: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.isAsync = exports.isValid = exports.isDirty = exports.isAborted = exports.OK = exports.DIRTY = exports.INVALID = exports.ParseStatus = exports.addIssueToContext = exports.EMPTY_PATH = exports.makeIssue = void 0; +const errors_1 = __nccwpck_require__(5918); +const en_1 = __importDefault(__nccwpck_require__(9294)); +const makeIssue = (params) => { + const { data, path, errorMaps, issueData } = params; + const fullPath = [...path, ...(issueData.path || [])]; + const fullIssue = { + ...issueData, + path: fullPath, + }; + if (issueData.message !== undefined) { + return { + ...issueData, + path: fullPath, + message: issueData.message, + }; + } + let errorMessage = ""; + const maps = errorMaps + .filter((m) => !!m) + .slice() + .reverse(); + for (const map of maps) { + errorMessage = map(fullIssue, { data, defaultError: errorMessage }).message; + } + return { + ...issueData, + path: fullPath, + message: errorMessage, + }; +}; +exports.makeIssue = makeIssue; +exports.EMPTY_PATH = []; +function addIssueToContext(ctx, issueData) { + const overrideMap = (0, errors_1.getErrorMap)(); + const issue = (0, exports.makeIssue)({ + issueData: issueData, + data: ctx.data, + path: ctx.path, + errorMaps: [ + ctx.common.contextualErrorMap, // contextual error map is first priority + ctx.schemaErrorMap, // then schema-bound map if available + overrideMap, // then global override map + overrideMap === en_1.default ? 
undefined : en_1.default, // then global default map + ].filter((x) => !!x), + }); + ctx.common.issues.push(issue); +} +exports.addIssueToContext = addIssueToContext; +class ParseStatus { + constructor() { + this.value = "valid"; + } + dirty() { + if (this.value === "valid") + this.value = "dirty"; + } + abort() { + if (this.value !== "aborted") + this.value = "aborted"; + } + static mergeArray(status, results) { + const arrayValue = []; + for (const s of results) { + if (s.status === "aborted") + return exports.INVALID; + if (s.status === "dirty") + status.dirty(); + arrayValue.push(s.value); + } + return { status: status.value, value: arrayValue }; + } + static async mergeObjectAsync(status, pairs) { + const syncPairs = []; + for (const pair of pairs) { + const key = await pair.key; + const value = await pair.value; + syncPairs.push({ + key, + value, + }); + } + return ParseStatus.mergeObjectSync(status, syncPairs); + } + static mergeObjectSync(status, pairs) { + const finalObject = {}; + for (const pair of pairs) { + const { key, value } = pair; + if (key.status === "aborted") + return exports.INVALID; + if (value.status === "aborted") + return exports.INVALID; + if (key.status === "dirty") + status.dirty(); + if (value.status === "dirty") + status.dirty(); + if (key.value !== "__proto__" && + (typeof value.value !== "undefined" || pair.alwaysSet)) { + finalObject[key.value] = value.value; + } + } + return { status: status.value, value: finalObject }; + } +} +exports.ParseStatus = ParseStatus; +exports.INVALID = Object.freeze({ + status: "aborted", +}); +const DIRTY = (value) => ({ status: "dirty", value }); +exports.DIRTY = DIRTY; +const OK = (value) => ({ status: "valid", value }); +exports.OK = OK; +const isAborted = (x) => x.status === "aborted"; +exports.isAborted = isAborted; +const isDirty = (x) => x.status === "dirty"; +exports.isDirty = isDirty; +const isValid = (x) => x.status === "valid"; +exports.isValid = isValid; +const isAsync = (x) => typeof Promise !== "undefined" && x instanceof Promise; +exports.isAsync = isAsync; + + +/***/ }), + +/***/ 5935: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + + +/***/ }), + +/***/ 8771: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getParsedType = exports.ZodParsedType = exports.objectUtil = exports.util = void 0; +var util; +(function (util) { + util.assertEqual = (val) => val; + function assertIs(_arg) { } + util.assertIs = assertIs; + function assertNever(_x) { + throw new Error(); + } + util.assertNever = assertNever; + util.arrayToEnum = (items) => { + const obj = {}; + for (const item of items) { + obj[item] = item; + } + return obj; + }; + util.getValidEnumValues = (obj) => { + const validKeys = util.objectKeys(obj).filter((k) => typeof obj[obj[k]] !== "number"); + const filtered = {}; + for (const k of validKeys) { + filtered[k] = obj[k]; + } + return util.objectValues(filtered); + }; + util.objectValues = (obj) => { + return util.objectKeys(obj).map(function (e) { + return obj[e]; + }); + }; + util.objectKeys = typeof Object.keys === "function" // eslint-disable-line ban/ban + ? 
(obj) => Object.keys(obj) // eslint-disable-line ban/ban + : (object) => { + const keys = []; + for (const key in object) { + if (Object.prototype.hasOwnProperty.call(object, key)) { + keys.push(key); + } + } + return keys; + }; + util.find = (arr, checker) => { + for (const item of arr) { + if (checker(item)) + return item; + } + return undefined; + }; + util.isInteger = typeof Number.isInteger === "function" + ? (val) => Number.isInteger(val) // eslint-disable-line ban/ban + : (val) => typeof val === "number" && isFinite(val) && Math.floor(val) === val; + function joinValues(array, separator = " | ") { + return array + .map((val) => (typeof val === "string" ? `'${val}'` : val)) + .join(separator); + } + util.joinValues = joinValues; + util.jsonStringifyReplacer = (_, value) => { + if (typeof value === "bigint") { + return value.toString(); + } + return value; + }; +})(util || (exports.util = util = {})); +var objectUtil; +(function (objectUtil) { + objectUtil.mergeShapes = (first, second) => { + return { + ...first, + ...second, // second overwrites first + }; + }; +})(objectUtil || (exports.objectUtil = objectUtil = {})); +exports.ZodParsedType = util.arrayToEnum([ + "string", + "nan", + "number", + "integer", + "float", + "boolean", + "date", + "bigint", + "symbol", + "function", + "undefined", + "null", + "array", + "object", + "unknown", + "promise", + "void", + "never", + "map", + "set", +]); +const getParsedType = (data) => { + const t = typeof data; + switch (t) { + case "undefined": + return exports.ZodParsedType.undefined; + case "string": + return exports.ZodParsedType.string; + case "number": + return isNaN(data) ? exports.ZodParsedType.nan : exports.ZodParsedType.number; + case "boolean": + return exports.ZodParsedType.boolean; + case "function": + return exports.ZodParsedType.function; + case "bigint": + return exports.ZodParsedType.bigint; + case "symbol": + return exports.ZodParsedType.symbol; + case "object": + if (Array.isArray(data)) { + return exports.ZodParsedType.array; + } + if (data === null) { + return exports.ZodParsedType.null; + } + if (data.then && + typeof data.then === "function" && + data.catch && + typeof data.catch === "function") { + return exports.ZodParsedType.promise; + } + if (typeof Map !== "undefined" && data instanceof Map) { + return exports.ZodParsedType.map; + } + if (typeof Set !== "undefined" && data instanceof Set) { + return exports.ZodParsedType.set; + } + if (typeof Date !== "undefined" && data instanceof Date) { + return exports.ZodParsedType.date; + } + return exports.ZodParsedType.object; + default: + return exports.ZodParsedType.unknown; + } +}; +exports.getParsedType = getParsedType; + + +/***/ }), + +/***/ 4809: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.z = void 0; +const z = __importStar(__nccwpck_require__(9746)); +exports.z = z; +__exportStar(__nccwpck_require__(9746), exports); +exports["default"] = z; + + +/***/ }), + +/***/ 9294: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const util_1 = __nccwpck_require__(8771); +const ZodError_1 = __nccwpck_require__(3176); +const errorMap = (issue, _ctx) => { + let message; + switch (issue.code) { + case ZodError_1.ZodIssueCode.invalid_type: + if (issue.received === util_1.ZodParsedType.undefined) { + message = "Required"; + } + else { + message = `Expected ${issue.expected}, received ${issue.received}`; + } + break; + case ZodError_1.ZodIssueCode.invalid_literal: + message = `Invalid literal value, expected ${JSON.stringify(issue.expected, util_1.util.jsonStringifyReplacer)}`; + break; + case ZodError_1.ZodIssueCode.unrecognized_keys: + message = `Unrecognized key(s) in object: ${util_1.util.joinValues(issue.keys, ", ")}`; + break; + case ZodError_1.ZodIssueCode.invalid_union: + message = `Invalid input`; + break; + case ZodError_1.ZodIssueCode.invalid_union_discriminator: + message = `Invalid discriminator value. Expected ${util_1.util.joinValues(issue.options)}`; + break; + case ZodError_1.ZodIssueCode.invalid_enum_value: + message = `Invalid enum value. Expected ${util_1.util.joinValues(issue.options)}, received '${issue.received}'`; + break; + case ZodError_1.ZodIssueCode.invalid_arguments: + message = `Invalid function arguments`; + break; + case ZodError_1.ZodIssueCode.invalid_return_type: + message = `Invalid function return type`; + break; + case ZodError_1.ZodIssueCode.invalid_date: + message = `Invalid date`; + break; + case ZodError_1.ZodIssueCode.invalid_string: + if (typeof issue.validation === "object") { + if ("includes" in issue.validation) { + message = `Invalid input: must include "${issue.validation.includes}"`; + if (typeof issue.validation.position === "number") { + message = `${message} at one or more positions greater than or equal to ${issue.validation.position}`; + } + } + else if ("startsWith" in issue.validation) { + message = `Invalid input: must start with "${issue.validation.startsWith}"`; + } + else if ("endsWith" in issue.validation) { + message = `Invalid input: must end with "${issue.validation.endsWith}"`; + } + else { + util_1.util.assertNever(issue.validation); + } + } + else if (issue.validation !== "regex") { + message = `Invalid ${issue.validation}`; + } + else { + message = "Invalid"; + } + break; + case ZodError_1.ZodIssueCode.too_small: + if (issue.type === "array") + message = `Array must contain ${issue.exact ? "exactly" : issue.inclusive ? 
`at least` : `more than`} ${issue.minimum} element(s)`; + else if (issue.type === "string") + message = `String must contain ${issue.exact ? "exactly" : issue.inclusive ? `at least` : `over`} ${issue.minimum} character(s)`; + else if (issue.type === "number") + message = `Number must be ${issue.exact + ? `exactly equal to ` + : issue.inclusive + ? `greater than or equal to ` + : `greater than `}${issue.minimum}`; + else if (issue.type === "date") + message = `Date must be ${issue.exact + ? `exactly equal to ` + : issue.inclusive + ? `greater than or equal to ` + : `greater than `}${new Date(Number(issue.minimum))}`; + else + message = "Invalid input"; + break; + case ZodError_1.ZodIssueCode.too_big: + if (issue.type === "array") + message = `Array must contain ${issue.exact ? `exactly` : issue.inclusive ? `at most` : `less than`} ${issue.maximum} element(s)`; + else if (issue.type === "string") + message = `String must contain ${issue.exact ? `exactly` : issue.inclusive ? `at most` : `under`} ${issue.maximum} character(s)`; + else if (issue.type === "number") + message = `Number must be ${issue.exact + ? `exactly` + : issue.inclusive + ? `less than or equal to` + : `less than`} ${issue.maximum}`; + else if (issue.type === "bigint") + message = `BigInt must be ${issue.exact + ? `exactly` + : issue.inclusive + ? `less than or equal to` + : `less than`} ${issue.maximum}`; + else if (issue.type === "date") + message = `Date must be ${issue.exact + ? `exactly` + : issue.inclusive + ? `smaller than or equal to` + : `smaller than`} ${new Date(Number(issue.maximum))}`; + else + message = "Invalid input"; + break; + case ZodError_1.ZodIssueCode.custom: + message = `Invalid input`; + break; + case ZodError_1.ZodIssueCode.invalid_intersection_types: + message = `Intersection results could not be merged`; + break; + case ZodError_1.ZodIssueCode.not_multiple_of: + message = `Number must be a multiple of ${issue.multipleOf}`; + break; + case ZodError_1.ZodIssueCode.not_finite: + message = "Number must be finite"; + break; + default: + message = _ctx.defaultError; + util_1.util.assertNever(issue); + } + return { message }; +}; +exports["default"] = errorMap; + + +/***/ }), + +/***/ 5128: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var _ZodEnum_cache, _ZodNativeEnum_cache; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.boolean = exports.bigint = exports.array = exports.any = exports.coerce = exports.ZodFirstPartyTypeKind = exports.late = exports.ZodSchema = exports.Schema = exports.custom = exports.ZodReadonly = exports.ZodPipeline = exports.ZodBranded = exports.BRAND = exports.ZodNaN = exports.ZodCatch = exports.ZodDefault = exports.ZodNullable = exports.ZodOptional = exports.ZodTransformer = exports.ZodEffects = exports.ZodPromise = exports.ZodNativeEnum = exports.ZodEnum = exports.ZodLiteral = exports.ZodLazy = exports.ZodFunction = exports.ZodSet = exports.ZodMap = exports.ZodRecord = exports.ZodTuple = exports.ZodIntersection = exports.ZodDiscriminatedUnion = exports.ZodUnion = exports.ZodObject = exports.ZodArray = exports.ZodVoid = exports.ZodNever = exports.ZodUnknown = exports.ZodAny = exports.ZodNull = exports.ZodUndefined = exports.ZodSymbol = exports.ZodDate = exports.ZodBoolean = exports.ZodBigInt = exports.ZodNumber = exports.ZodString = exports.datetimeRegex = exports.ZodType = void 0; +exports.NEVER = exports["void"] = exports.unknown = exports.union = exports.undefined = exports.tuple = exports.transformer = exports.symbol = exports.string = exports.strictObject = exports.set = exports.record = exports.promise = exports.preprocess = exports.pipeline = exports.ostring = exports.optional = exports.onumber = exports.oboolean = exports.object = exports.number = exports.nullable = exports["null"] = exports.never = exports.nativeEnum = exports.nan = exports.map = exports.literal = exports.lazy = exports.intersection = exports["instanceof"] = exports["function"] = exports["enum"] = exports.effect = exports.discriminatedUnion = exports.date = void 0; +const errors_1 = __nccwpck_require__(5918); +const errorUtil_1 = __nccwpck_require__(189); +const parseUtil_1 = __nccwpck_require__(4640); +const util_1 = __nccwpck_require__(8771); +const ZodError_1 = __nccwpck_require__(3176); +class ParseInputLazyPath { + constructor(parent, value, path, key) { + this._cachedPath = []; + this.parent = parent; + this.data = value; + this._path = path; + this._key = key; + } + get path() { + if (!this._cachedPath.length) { + if (this._key instanceof Array) { + this._cachedPath.push(...this._path, ...this._key); + } + else { + this._cachedPath.push(...this._path, this._key); + } + } + return this._cachedPath; + } +} +const handleResult = (ctx, result) => { + if ((0, parseUtil_1.isValid)(result)) { + return { success: true, data: result.value }; + } + else { + if (!ctx.common.issues.length) { + throw new Error("Validation failed but no issues detected."); + } + return { + success: false, + get error() { + if (this._error) + return this._error; + const error = new ZodError_1.ZodError(ctx.common.issues); + this._error = error; + return this._error; + }, + }; + } +}; +function processCreateParams(params) { + if (!params) + return {}; + const { errorMap, invalid_type_error, required_error, description } = params; + if (errorMap && (invalid_type_error || required_error)) { + throw new Error(`Can't use "invalid_type_error" or "required_error" in conjunction with custom error map.`); + } + if (errorMap) + return { errorMap: errorMap, description }; + const customMap = (iss, ctx) => { + var _a, _b; + const { message } = params; + if (iss.code === "invalid_enum_value") { + return { message: message !== null && message !== void 0 ? 
message : ctx.defaultError }; + } + if (typeof ctx.data === "undefined") { + return { message: (_a = message !== null && message !== void 0 ? message : required_error) !== null && _a !== void 0 ? _a : ctx.defaultError }; + } + if (iss.code !== "invalid_type") + return { message: ctx.defaultError }; + return { message: (_b = message !== null && message !== void 0 ? message : invalid_type_error) !== null && _b !== void 0 ? _b : ctx.defaultError }; + }; + return { errorMap: customMap, description }; +} +class ZodType { + get description() { + return this._def.description; + } + _getType(input) { + return (0, util_1.getParsedType)(input.data); + } + _getOrReturnCtx(input, ctx) { + return (ctx || { + common: input.parent.common, + data: input.data, + parsedType: (0, util_1.getParsedType)(input.data), + schemaErrorMap: this._def.errorMap, + path: input.path, + parent: input.parent, + }); + } + _processInputParams(input) { + return { + status: new parseUtil_1.ParseStatus(), + ctx: { + common: input.parent.common, + data: input.data, + parsedType: (0, util_1.getParsedType)(input.data), + schemaErrorMap: this._def.errorMap, + path: input.path, + parent: input.parent, + }, + }; + } + _parseSync(input) { + const result = this._parse(input); + if ((0, parseUtil_1.isAsync)(result)) { + throw new Error("Synchronous parse encountered promise."); + } + return result; + } + _parseAsync(input) { + const result = this._parse(input); + return Promise.resolve(result); + } + parse(data, params) { + const result = this.safeParse(data, params); + if (result.success) + return result.data; + throw result.error; + } + safeParse(data, params) { + var _a; + const ctx = { + common: { + issues: [], + async: (_a = params === null || params === void 0 ? void 0 : params.async) !== null && _a !== void 0 ? _a : false, + contextualErrorMap: params === null || params === void 0 ? void 0 : params.errorMap, + }, + path: (params === null || params === void 0 ? void 0 : params.path) || [], + schemaErrorMap: this._def.errorMap, + parent: null, + data, + parsedType: (0, util_1.getParsedType)(data), + }; + const result = this._parseSync({ data, path: ctx.path, parent: ctx }); + return handleResult(ctx, result); + } + "~validate"(data) { + var _a, _b; + const ctx = { + common: { + issues: [], + async: !!this["~standard"].async, + }, + path: [], + schemaErrorMap: this._def.errorMap, + parent: null, + data, + parsedType: (0, util_1.getParsedType)(data), + }; + if (!this["~standard"].async) { + try { + const result = this._parseSync({ data, path: [], parent: ctx }); + return (0, parseUtil_1.isValid)(result) + ? { + value: result.value, + } + : { + issues: ctx.common.issues, + }; + } + catch (err) { + if ((_b = (_a = err === null || err === void 0 ? void 0 : err.message) === null || _a === void 0 ? void 0 : _a.toLowerCase()) === null || _b === void 0 ? void 0 : _b.includes("encountered")) { + this["~standard"].async = true; + } + ctx.common = { + issues: [], + async: true, + }; + } + } + return this._parseAsync({ data, path: [], parent: ctx }).then((result) => (0, parseUtil_1.isValid)(result) + ? { + value: result.value, + } + : { + issues: ctx.common.issues, + }); + } + async parseAsync(data, params) { + const result = await this.safeParseAsync(data, params); + if (result.success) + return result.data; + throw result.error; + } + async safeParseAsync(data, params) { + const ctx = { + common: { + issues: [], + contextualErrorMap: params === null || params === void 0 ? 
void 0 : params.errorMap, + async: true, + }, + path: (params === null || params === void 0 ? void 0 : params.path) || [], + schemaErrorMap: this._def.errorMap, + parent: null, + data, + parsedType: (0, util_1.getParsedType)(data), + }; + const maybeAsyncResult = this._parse({ data, path: ctx.path, parent: ctx }); + const result = await ((0, parseUtil_1.isAsync)(maybeAsyncResult) + ? maybeAsyncResult + : Promise.resolve(maybeAsyncResult)); + return handleResult(ctx, result); + } + refine(check, message) { + const getIssueProperties = (val) => { + if (typeof message === "string" || typeof message === "undefined") { + return { message }; + } + else if (typeof message === "function") { + return message(val); + } + else { + return message; + } + }; + return this._refinement((val, ctx) => { + const result = check(val); + const setError = () => ctx.addIssue({ + code: ZodError_1.ZodIssueCode.custom, + ...getIssueProperties(val), + }); + if (typeof Promise !== "undefined" && result instanceof Promise) { + return result.then((data) => { + if (!data) { + setError(); + return false; + } + else { + return true; + } + }); + } + if (!result) { + setError(); + return false; + } + else { + return true; + } + }); + } + refinement(check, refinementData) { + return this._refinement((val, ctx) => { + if (!check(val)) { + ctx.addIssue(typeof refinementData === "function" + ? refinementData(val, ctx) + : refinementData); + return false; + } + else { + return true; + } + }); + } + _refinement(refinement) { + return new ZodEffects({ + schema: this, + typeName: ZodFirstPartyTypeKind.ZodEffects, + effect: { type: "refinement", refinement }, + }); + } + superRefine(refinement) { + return this._refinement(refinement); + } + constructor(def) { + /** Alias of safeParseAsync */ + this.spa = this.safeParseAsync; + this._def = def; + this.parse = this.parse.bind(this); + this.safeParse = this.safeParse.bind(this); + this.parseAsync = this.parseAsync.bind(this); + this.safeParseAsync = this.safeParseAsync.bind(this); + this.spa = this.spa.bind(this); + this.refine = this.refine.bind(this); + this.refinement = this.refinement.bind(this); + this.superRefine = this.superRefine.bind(this); + this.optional = this.optional.bind(this); + this.nullable = this.nullable.bind(this); + this.nullish = this.nullish.bind(this); + this.array = this.array.bind(this); + this.promise = this.promise.bind(this); + this.or = this.or.bind(this); + this.and = this.and.bind(this); + this.transform = this.transform.bind(this); + this.brand = this.brand.bind(this); + this.default = this.default.bind(this); + this.catch = this.catch.bind(this); + this.describe = this.describe.bind(this); + this.pipe = this.pipe.bind(this); + this.readonly = this.readonly.bind(this); + this.isNullable = this.isNullable.bind(this); + this.isOptional = this.isOptional.bind(this); + this["~standard"] = { + version: 1, + vendor: "zod", + validate: (data) => this["~validate"](data), + }; + } + optional() { + return ZodOptional.create(this, this._def); + } + nullable() { + return ZodNullable.create(this, this._def); + } + nullish() { + return this.nullable().optional(); + } + array() { + return ZodArray.create(this); + } + promise() { + return ZodPromise.create(this, this._def); + } + or(option) { + return ZodUnion.create([this, option], this._def); + } + and(incoming) { + return ZodIntersection.create(this, incoming, this._def); + } + transform(transform) { + return new ZodEffects({ + ...processCreateParams(this._def), + schema: this, + typeName: 
ZodFirstPartyTypeKind.ZodEffects, + effect: { type: "transform", transform }, + }); + } + default(def) { + const defaultValueFunc = typeof def === "function" ? def : () => def; + return new ZodDefault({ + ...processCreateParams(this._def), + innerType: this, + defaultValue: defaultValueFunc, + typeName: ZodFirstPartyTypeKind.ZodDefault, + }); + } + brand() { + return new ZodBranded({ + typeName: ZodFirstPartyTypeKind.ZodBranded, + type: this, + ...processCreateParams(this._def), + }); + } + catch(def) { + const catchValueFunc = typeof def === "function" ? def : () => def; + return new ZodCatch({ + ...processCreateParams(this._def), + innerType: this, + catchValue: catchValueFunc, + typeName: ZodFirstPartyTypeKind.ZodCatch, + }); + } + describe(description) { + const This = this.constructor; + return new This({ + ...this._def, + description, + }); + } + pipe(target) { + return ZodPipeline.create(this, target); + } + readonly() { + return ZodReadonly.create(this); + } + isOptional() { + return this.safeParse(undefined).success; + } + isNullable() { + return this.safeParse(null).success; + } +} +exports.ZodType = ZodType; +exports.Schema = ZodType; +exports.ZodSchema = ZodType; +const cuidRegex = /^c[^\s-]{8,}$/i; +const cuid2Regex = /^[0-9a-z]+$/; +const ulidRegex = /^[0-9A-HJKMNP-TV-Z]{26}$/i; +// const uuidRegex = +// /^([a-f0-9]{8}-[a-f0-9]{4}-[1-5][a-f0-9]{3}-[a-f0-9]{4}-[a-f0-9]{12}|00000000-0000-0000-0000-000000000000)$/i; +const uuidRegex = /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/i; +const nanoidRegex = /^[a-z0-9_-]{21}$/i; +const jwtRegex = /^[A-Za-z0-9-_]+\.[A-Za-z0-9-_]+\.[A-Za-z0-9-_]*$/; +const durationRegex = /^[-+]?P(?!$)(?:(?:[-+]?\d+Y)|(?:[-+]?\d+[.,]\d+Y$))?(?:(?:[-+]?\d+M)|(?:[-+]?\d+[.,]\d+M$))?(?:(?:[-+]?\d+W)|(?:[-+]?\d+[.,]\d+W$))?(?:(?:[-+]?\d+D)|(?:[-+]?\d+[.,]\d+D$))?(?:T(?=[\d+-])(?:(?:[-+]?\d+H)|(?:[-+]?\d+[.,]\d+H$))?(?:(?:[-+]?\d+M)|(?:[-+]?\d+[.,]\d+M$))?(?:[-+]?\d+(?:[.,]\d+)?S)?)??$/; +// from https://stackoverflow.com/a/46181/1550155 +// old version: too slow, didn't support unicode +// const emailRegex = /^((([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+(\.([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+)*)|((\x22)((((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(([\x01-\x08\x0b\x0c\x0e-\x1f\x7f]|\x21|[\x23-\x5b]|[\x5d-\x7e]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(\\([\x01-\x09\x0b\x0c\x0d-\x7f]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]))))*(((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(\x22)))@((([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.)+(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))$/i; +//old email regex +// const emailRegex = /^(([^<>()[\].,;:\s@"]+(\.[^<>()[\].,;:\s@"]+)*)|(".+"))@((?!-)([^<>()[\].,;:\s@"]+\.)+[^<>()[\].,;:\s@"]{1,})[^-<>()[\].,;:\s@"]$/i; +// eslint-disable-next-line +// const emailRegex = +// 
/^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\])|(\[IPv6:(([a-f0-9]{1,4}:){7}|::([a-f0-9]{1,4}:){0,6}|([a-f0-9]{1,4}:){1}:([a-f0-9]{1,4}:){0,5}|([a-f0-9]{1,4}:){2}:([a-f0-9]{1,4}:){0,4}|([a-f0-9]{1,4}:){3}:([a-f0-9]{1,4}:){0,3}|([a-f0-9]{1,4}:){4}:([a-f0-9]{1,4}:){0,2}|([a-f0-9]{1,4}:){5}:([a-f0-9]{1,4}:){0,1})([a-f0-9]{1,4}|(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2})))\])|([A-Za-z0-9]([A-Za-z0-9-]*[A-Za-z0-9])*(\.[A-Za-z]{2,})+))$/; +// const emailRegex = +// /^[a-zA-Z0-9\.\!\#\$\%\&\'\*\+\/\=\?\^\_\`\{\|\}\~\-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/; +// const emailRegex = +// /^(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])$/i; +const emailRegex = /^(?!\.)(?!.*\.\.)([A-Z0-9_'+\-\.]*)[A-Z0-9_+-]@([A-Z0-9][A-Z0-9\-]*\.)+[A-Z]{2,}$/i; +// const emailRegex = +// /^[a-z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-z0-9-]+(?:\.[a-z0-9\-]+)*$/i; +// from https://thekevinscott.com/emojis-in-javascript/#writing-a-regular-expression +const _emojiRegex = `^(\\p{Extended_Pictographic}|\\p{Emoji_Component})+$`; +let emojiRegex; +// faster, simpler, safer +const ipv4Regex = /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])$/; +const ipv4CidrRegex = /^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\/(3[0-2]|[12]?[0-9])$/; +// const ipv6Regex = +// /^(([a-f0-9]{1,4}:){7}|::([a-f0-9]{1,4}:){0,6}|([a-f0-9]{1,4}:){1}:([a-f0-9]{1,4}:){0,5}|([a-f0-9]{1,4}:){2}:([a-f0-9]{1,4}:){0,4}|([a-f0-9]{1,4}:){3}:([a-f0-9]{1,4}:){0,3}|([a-f0-9]{1,4}:){4}:([a-f0-9]{1,4}:){0,2}|([a-f0-9]{1,4}:){5}:([a-f0-9]{1,4}:){0,1})([a-f0-9]{1,4}|(((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2}))\.){3}((25[0-5])|(2[0-4][0-9])|(1[0-9]{2})|([0-9]{1,2})))$/; +const ipv6Regex = /^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$/; +const ipv6CidrRegex = 
/^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))\/(12[0-8]|1[01][0-9]|[1-9]?[0-9])$/; +// https://stackoverflow.com/questions/7860392/determine-if-string-is-in-base64-using-javascript +const base64Regex = /^([0-9a-zA-Z+/]{4})*(([0-9a-zA-Z+/]{2}==)|([0-9a-zA-Z+/]{3}=))?$/; +// https://base64.guru/standards/base64url +const base64urlRegex = /^([0-9a-zA-Z-_]{4})*(([0-9a-zA-Z-_]{2}(==)?)|([0-9a-zA-Z-_]{3}(=)?))?$/; +// simple +// const dateRegexSource = `\\d{4}-\\d{2}-\\d{2}`; +// no leap year validation +// const dateRegexSource = `\\d{4}-((0[13578]|10|12)-31|(0[13-9]|1[0-2])-30|(0[1-9]|1[0-2])-(0[1-9]|1\\d|2\\d))`; +// with leap year validation +const dateRegexSource = `((\\d\\d[2468][048]|\\d\\d[13579][26]|\\d\\d0[48]|[02468][048]00|[13579][26]00)-02-29|\\d{4}-((0[13578]|1[02])-(0[1-9]|[12]\\d|3[01])|(0[469]|11)-(0[1-9]|[12]\\d|30)|(02)-(0[1-9]|1\\d|2[0-8])))`; +const dateRegex = new RegExp(`^${dateRegexSource}$`); +function timeRegexSource(args) { + // let regex = `\\d{2}:\\d{2}:\\d{2}`; + let regex = `([01]\\d|2[0-3]):[0-5]\\d:[0-5]\\d`; + if (args.precision) { + regex = `${regex}\\.\\d{${args.precision}}`; + } + else if (args.precision == null) { + regex = `${regex}(\\.\\d+)?`; + } + return regex; +} +function timeRegex(args) { + return new RegExp(`^${timeRegexSource(args)}$`); +} +// Adapted from https://stackoverflow.com/a/3143231 +function datetimeRegex(args) { + let regex = `${dateRegexSource}T${timeRegexSource(args)}`; + const opts = []; + opts.push(args.local ? 
`Z?` : `Z`); + if (args.offset) + opts.push(`([+-]\\d{2}:?\\d{2})`); + regex = `${regex}(${opts.join("|")})`; + return new RegExp(`^${regex}$`); +} +exports.datetimeRegex = datetimeRegex; +function isValidIP(ip, version) { + if ((version === "v4" || !version) && ipv4Regex.test(ip)) { + return true; + } + if ((version === "v6" || !version) && ipv6Regex.test(ip)) { + return true; + } + return false; +} +function isValidJWT(jwt, alg) { + if (!jwtRegex.test(jwt)) + return false; + try { + const [header] = jwt.split("."); + // Convert base64url to base64 + const base64 = header + .replace(/-/g, "+") + .replace(/_/g, "/") + .padEnd(header.length + ((4 - (header.length % 4)) % 4), "="); + const decoded = JSON.parse(atob(base64)); + if (typeof decoded !== "object" || decoded === null) + return false; + if (!decoded.typ || !decoded.alg) + return false; + if (alg && decoded.alg !== alg) + return false; + return true; + } + catch (_a) { + return false; + } +} +function isValidCidr(ip, version) { + if ((version === "v4" || !version) && ipv4CidrRegex.test(ip)) { + return true; + } + if ((version === "v6" || !version) && ipv6CidrRegex.test(ip)) { + return true; + } + return false; +} +class ZodString extends ZodType { + _parse(input) { + if (this._def.coerce) { + input.data = String(input.data); + } + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.string) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.string, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + const status = new parseUtil_1.ParseStatus(); + let ctx = undefined; + for (const check of this._def.checks) { + if (check.kind === "min") { + if (input.data.length < check.value) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_small, + minimum: check.value, + type: "string", + inclusive: true, + exact: false, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "max") { + if (input.data.length > check.value) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_big, + maximum: check.value, + type: "string", + inclusive: true, + exact: false, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "length") { + const tooBig = input.data.length > check.value; + const tooSmall = input.data.length < check.value; + if (tooBig || tooSmall) { + ctx = this._getOrReturnCtx(input, ctx); + if (tooBig) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_big, + maximum: check.value, + type: "string", + inclusive: true, + exact: true, + message: check.message, + }); + } + else if (tooSmall) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_small, + minimum: check.value, + type: "string", + inclusive: true, + exact: true, + message: check.message, + }); + } + status.dirty(); + } + } + else if (check.kind === "email") { + if (!emailRegex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "email", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "emoji") { + if (!emojiRegex) { + emojiRegex = new RegExp(_emojiRegex, "u"); + } + if (!emojiRegex.test(input.data)) { + ctx = 
this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "emoji", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "uuid") { + if (!uuidRegex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "uuid", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "nanoid") { + if (!nanoidRegex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "nanoid", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "cuid") { + if (!cuidRegex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "cuid", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "cuid2") { + if (!cuid2Regex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "cuid2", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "ulid") { + if (!ulidRegex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "ulid", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "url") { + try { + new URL(input.data); + } + catch (_a) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "url", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "regex") { + check.regex.lastIndex = 0; + const testResult = check.regex.test(input.data); + if (!testResult) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "regex", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "trim") { + input.data = input.data.trim(); + } + else if (check.kind === "includes") { + if (!input.data.includes(check.value, check.position)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_string, + validation: { includes: check.value, position: check.position }, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "toLowerCase") { + input.data = input.data.toLowerCase(); + } + else if (check.kind === "toUpperCase") { + input.data = input.data.toUpperCase(); + } + else if (check.kind === "startsWith") { + if (!input.data.startsWith(check.value)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_string, + validation: { startsWith: check.value }, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "endsWith") { + if (!input.data.endsWith(check.value)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_string, + validation: { endsWith: check.value }, + message: check.message, + }); + status.dirty(); + } + } + else 
if (check.kind === "datetime") { + const regex = datetimeRegex(check); + if (!regex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_string, + validation: "datetime", + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "date") { + const regex = dateRegex; + if (!regex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_string, + validation: "date", + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "time") { + const regex = timeRegex(check); + if (!regex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_string, + validation: "time", + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "duration") { + if (!durationRegex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "duration", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "ip") { + if (!isValidIP(input.data, check.version)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "ip", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "jwt") { + if (!isValidJWT(input.data, check.alg)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "jwt", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "cidr") { + if (!isValidCidr(input.data, check.version)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "cidr", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "base64") { + if (!base64Regex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "base64", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "base64url") { + if (!base64urlRegex.test(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + validation: "base64url", + code: ZodError_1.ZodIssueCode.invalid_string, + message: check.message, + }); + status.dirty(); + } + } + else { + util_1.util.assertNever(check); + } + } + return { status: status.value, value: input.data }; + } + _regex(regex, validation, message) { + return this.refinement((data) => regex.test(data), { + validation, + code: ZodError_1.ZodIssueCode.invalid_string, + ...errorUtil_1.errorUtil.errToObj(message), + }); + } + _addCheck(check) { + return new ZodString({ + ...this._def, + checks: [...this._def.checks, check], + }); + } + email(message) { + return this._addCheck({ kind: "email", ...errorUtil_1.errorUtil.errToObj(message) }); + } + url(message) { + return this._addCheck({ kind: "url", ...errorUtil_1.errorUtil.errToObj(message) }); + } + emoji(message) { + return this._addCheck({ kind: "emoji", ...errorUtil_1.errorUtil.errToObj(message) }); + } + uuid(message) { + return this._addCheck({ kind: "uuid", 
...errorUtil_1.errorUtil.errToObj(message) }); + } + nanoid(message) { + return this._addCheck({ kind: "nanoid", ...errorUtil_1.errorUtil.errToObj(message) }); + } + cuid(message) { + return this._addCheck({ kind: "cuid", ...errorUtil_1.errorUtil.errToObj(message) }); + } + cuid2(message) { + return this._addCheck({ kind: "cuid2", ...errorUtil_1.errorUtil.errToObj(message) }); + } + ulid(message) { + return this._addCheck({ kind: "ulid", ...errorUtil_1.errorUtil.errToObj(message) }); + } + base64(message) { + return this._addCheck({ kind: "base64", ...errorUtil_1.errorUtil.errToObj(message) }); + } + base64url(message) { + // base64url encoding is a modification of base64 that can safely be used in URLs and filenames + return this._addCheck({ + kind: "base64url", + ...errorUtil_1.errorUtil.errToObj(message), + }); + } + jwt(options) { + return this._addCheck({ kind: "jwt", ...errorUtil_1.errorUtil.errToObj(options) }); + } + ip(options) { + return this._addCheck({ kind: "ip", ...errorUtil_1.errorUtil.errToObj(options) }); + } + cidr(options) { + return this._addCheck({ kind: "cidr", ...errorUtil_1.errorUtil.errToObj(options) }); + } + datetime(options) { + var _a, _b; + if (typeof options === "string") { + return this._addCheck({ + kind: "datetime", + precision: null, + offset: false, + local: false, + message: options, + }); + } + return this._addCheck({ + kind: "datetime", + precision: typeof (options === null || options === void 0 ? void 0 : options.precision) === "undefined" ? null : options === null || options === void 0 ? void 0 : options.precision, + offset: (_a = options === null || options === void 0 ? void 0 : options.offset) !== null && _a !== void 0 ? _a : false, + local: (_b = options === null || options === void 0 ? void 0 : options.local) !== null && _b !== void 0 ? _b : false, + ...errorUtil_1.errorUtil.errToObj(options === null || options === void 0 ? void 0 : options.message), + }); + } + date(message) { + return this._addCheck({ kind: "date", message }); + } + time(options) { + if (typeof options === "string") { + return this._addCheck({ + kind: "time", + precision: null, + message: options, + }); + } + return this._addCheck({ + kind: "time", + precision: typeof (options === null || options === void 0 ? void 0 : options.precision) === "undefined" ? null : options === null || options === void 0 ? void 0 : options.precision, + ...errorUtil_1.errorUtil.errToObj(options === null || options === void 0 ? void 0 : options.message), + }); + } + duration(message) { + return this._addCheck({ kind: "duration", ...errorUtil_1.errorUtil.errToObj(message) }); + } + regex(regex, message) { + return this._addCheck({ + kind: "regex", + regex: regex, + ...errorUtil_1.errorUtil.errToObj(message), + }); + } + includes(value, options) { + return this._addCheck({ + kind: "includes", + value: value, + position: options === null || options === void 0 ? void 0 : options.position, + ...errorUtil_1.errorUtil.errToObj(options === null || options === void 0 ? 
void 0 : options.message), + }); + } + startsWith(value, message) { + return this._addCheck({ + kind: "startsWith", + value: value, + ...errorUtil_1.errorUtil.errToObj(message), + }); + } + endsWith(value, message) { + return this._addCheck({ + kind: "endsWith", + value: value, + ...errorUtil_1.errorUtil.errToObj(message), + }); + } + min(minLength, message) { + return this._addCheck({ + kind: "min", + value: minLength, + ...errorUtil_1.errorUtil.errToObj(message), + }); + } + max(maxLength, message) { + return this._addCheck({ + kind: "max", + value: maxLength, + ...errorUtil_1.errorUtil.errToObj(message), + }); + } + length(len, message) { + return this._addCheck({ + kind: "length", + value: len, + ...errorUtil_1.errorUtil.errToObj(message), + }); + } + /** + * Equivalent to `.min(1)` + */ + nonempty(message) { + return this.min(1, errorUtil_1.errorUtil.errToObj(message)); + } + trim() { + return new ZodString({ + ...this._def, + checks: [...this._def.checks, { kind: "trim" }], + }); + } + toLowerCase() { + return new ZodString({ + ...this._def, + checks: [...this._def.checks, { kind: "toLowerCase" }], + }); + } + toUpperCase() { + return new ZodString({ + ...this._def, + checks: [...this._def.checks, { kind: "toUpperCase" }], + }); + } + get isDatetime() { + return !!this._def.checks.find((ch) => ch.kind === "datetime"); + } + get isDate() { + return !!this._def.checks.find((ch) => ch.kind === "date"); + } + get isTime() { + return !!this._def.checks.find((ch) => ch.kind === "time"); + } + get isDuration() { + return !!this._def.checks.find((ch) => ch.kind === "duration"); + } + get isEmail() { + return !!this._def.checks.find((ch) => ch.kind === "email"); + } + get isURL() { + return !!this._def.checks.find((ch) => ch.kind === "url"); + } + get isEmoji() { + return !!this._def.checks.find((ch) => ch.kind === "emoji"); + } + get isUUID() { + return !!this._def.checks.find((ch) => ch.kind === "uuid"); + } + get isNANOID() { + return !!this._def.checks.find((ch) => ch.kind === "nanoid"); + } + get isCUID() { + return !!this._def.checks.find((ch) => ch.kind === "cuid"); + } + get isCUID2() { + return !!this._def.checks.find((ch) => ch.kind === "cuid2"); + } + get isULID() { + return !!this._def.checks.find((ch) => ch.kind === "ulid"); + } + get isIP() { + return !!this._def.checks.find((ch) => ch.kind === "ip"); + } + get isCIDR() { + return !!this._def.checks.find((ch) => ch.kind === "cidr"); + } + get isBase64() { + return !!this._def.checks.find((ch) => ch.kind === "base64"); + } + get isBase64url() { + // base64url encoding is a modification of base64 that can safely be used in URLs and filenames + return !!this._def.checks.find((ch) => ch.kind === "base64url"); + } + get minLength() { + let min = null; + for (const ch of this._def.checks) { + if (ch.kind === "min") { + if (min === null || ch.value > min) + min = ch.value; + } + } + return min; + } + get maxLength() { + let max = null; + for (const ch of this._def.checks) { + if (ch.kind === "max") { + if (max === null || ch.value < max) + max = ch.value; + } + } + return max; + } +} +exports.ZodString = ZodString; +ZodString.create = (params) => { + var _a; + return new ZodString({ + checks: [], + typeName: ZodFirstPartyTypeKind.ZodString, + coerce: (_a = params === null || params === void 0 ? void 0 : params.coerce) !== null && _a !== void 0 ? 
_a : false, + ...processCreateParams(params), + }); +}; +// https://stackoverflow.com/questions/3966484/why-does-modulus-operator-return-fractional-number-in-javascript/31711034#31711034 +function floatSafeRemainder(val, step) { + const valDecCount = (val.toString().split(".")[1] || "").length; + const stepDecCount = (step.toString().split(".")[1] || "").length; + const decCount = valDecCount > stepDecCount ? valDecCount : stepDecCount; + const valInt = parseInt(val.toFixed(decCount).replace(".", "")); + const stepInt = parseInt(step.toFixed(decCount).replace(".", "")); + return (valInt % stepInt) / Math.pow(10, decCount); +} +class ZodNumber extends ZodType { + constructor() { + super(...arguments); + this.min = this.gte; + this.max = this.lte; + this.step = this.multipleOf; + } + _parse(input) { + if (this._def.coerce) { + input.data = Number(input.data); + } + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.number) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.number, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + let ctx = undefined; + const status = new parseUtil_1.ParseStatus(); + for (const check of this._def.checks) { + if (check.kind === "int") { + if (!util_1.util.isInteger(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: "integer", + received: "float", + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "min") { + const tooSmall = check.inclusive + ? input.data < check.value + : input.data <= check.value; + if (tooSmall) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_small, + minimum: check.value, + type: "number", + inclusive: check.inclusive, + exact: false, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "max") { + const tooBig = check.inclusive + ? 
input.data > check.value + : input.data >= check.value; + if (tooBig) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_big, + maximum: check.value, + type: "number", + inclusive: check.inclusive, + exact: false, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "multipleOf") { + if (floatSafeRemainder(input.data, check.value) !== 0) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.not_multiple_of, + multipleOf: check.value, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "finite") { + if (!Number.isFinite(input.data)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.not_finite, + message: check.message, + }); + status.dirty(); + } + } + else { + util_1.util.assertNever(check); + } + } + return { status: status.value, value: input.data }; + } + gte(value, message) { + return this.setLimit("min", value, true, errorUtil_1.errorUtil.toString(message)); + } + gt(value, message) { + return this.setLimit("min", value, false, errorUtil_1.errorUtil.toString(message)); + } + lte(value, message) { + return this.setLimit("max", value, true, errorUtil_1.errorUtil.toString(message)); + } + lt(value, message) { + return this.setLimit("max", value, false, errorUtil_1.errorUtil.toString(message)); + } + setLimit(kind, value, inclusive, message) { + return new ZodNumber({ + ...this._def, + checks: [ + ...this._def.checks, + { + kind, + value, + inclusive, + message: errorUtil_1.errorUtil.toString(message), + }, + ], + }); + } + _addCheck(check) { + return new ZodNumber({ + ...this._def, + checks: [...this._def.checks, check], + }); + } + int(message) { + return this._addCheck({ + kind: "int", + message: errorUtil_1.errorUtil.toString(message), + }); + } + positive(message) { + return this._addCheck({ + kind: "min", + value: 0, + inclusive: false, + message: errorUtil_1.errorUtil.toString(message), + }); + } + negative(message) { + return this._addCheck({ + kind: "max", + value: 0, + inclusive: false, + message: errorUtil_1.errorUtil.toString(message), + }); + } + nonpositive(message) { + return this._addCheck({ + kind: "max", + value: 0, + inclusive: true, + message: errorUtil_1.errorUtil.toString(message), + }); + } + nonnegative(message) { + return this._addCheck({ + kind: "min", + value: 0, + inclusive: true, + message: errorUtil_1.errorUtil.toString(message), + }); + } + multipleOf(value, message) { + return this._addCheck({ + kind: "multipleOf", + value: value, + message: errorUtil_1.errorUtil.toString(message), + }); + } + finite(message) { + return this._addCheck({ + kind: "finite", + message: errorUtil_1.errorUtil.toString(message), + }); + } + safe(message) { + return this._addCheck({ + kind: "min", + inclusive: true, + value: Number.MIN_SAFE_INTEGER, + message: errorUtil_1.errorUtil.toString(message), + })._addCheck({ + kind: "max", + inclusive: true, + value: Number.MAX_SAFE_INTEGER, + message: errorUtil_1.errorUtil.toString(message), + }); + } + get minValue() { + let min = null; + for (const ch of this._def.checks) { + if (ch.kind === "min") { + if (min === null || ch.value > min) + min = ch.value; + } + } + return min; + } + get maxValue() { + let max = null; + for (const ch of this._def.checks) { + if (ch.kind === "max") { + if (max === null || ch.value < max) + max = ch.value; + } + } + return max; + } + get 
isInt() { + return !!this._def.checks.find((ch) => ch.kind === "int" || + (ch.kind === "multipleOf" && util_1.util.isInteger(ch.value))); + } + get isFinite() { + let max = null, min = null; + for (const ch of this._def.checks) { + if (ch.kind === "finite" || + ch.kind === "int" || + ch.kind === "multipleOf") { + return true; + } + else if (ch.kind === "min") { + if (min === null || ch.value > min) + min = ch.value; + } + else if (ch.kind === "max") { + if (max === null || ch.value < max) + max = ch.value; + } + } + return Number.isFinite(min) && Number.isFinite(max); + } +} +exports.ZodNumber = ZodNumber; +ZodNumber.create = (params) => { + return new ZodNumber({ + checks: [], + typeName: ZodFirstPartyTypeKind.ZodNumber, + coerce: (params === null || params === void 0 ? void 0 : params.coerce) || false, + ...processCreateParams(params), + }); +}; +class ZodBigInt extends ZodType { + constructor() { + super(...arguments); + this.min = this.gte; + this.max = this.lte; + } + _parse(input) { + if (this._def.coerce) { + try { + input.data = BigInt(input.data); + } + catch (_a) { + return this._getInvalidInput(input); + } + } + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.bigint) { + return this._getInvalidInput(input); + } + let ctx = undefined; + const status = new parseUtil_1.ParseStatus(); + for (const check of this._def.checks) { + if (check.kind === "min") { + const tooSmall = check.inclusive + ? input.data < check.value + : input.data <= check.value; + if (tooSmall) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_small, + type: "bigint", + minimum: check.value, + inclusive: check.inclusive, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "max") { + const tooBig = check.inclusive + ? 
input.data > check.value + : input.data >= check.value; + if (tooBig) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_big, + type: "bigint", + maximum: check.value, + inclusive: check.inclusive, + message: check.message, + }); + status.dirty(); + } + } + else if (check.kind === "multipleOf") { + if (input.data % check.value !== BigInt(0)) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.not_multiple_of, + multipleOf: check.value, + message: check.message, + }); + status.dirty(); + } + } + else { + util_1.util.assertNever(check); + } + } + return { status: status.value, value: input.data }; + } + _getInvalidInput(input) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.bigint, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + gte(value, message) { + return this.setLimit("min", value, true, errorUtil_1.errorUtil.toString(message)); + } + gt(value, message) { + return this.setLimit("min", value, false, errorUtil_1.errorUtil.toString(message)); + } + lte(value, message) { + return this.setLimit("max", value, true, errorUtil_1.errorUtil.toString(message)); + } + lt(value, message) { + return this.setLimit("max", value, false, errorUtil_1.errorUtil.toString(message)); + } + setLimit(kind, value, inclusive, message) { + return new ZodBigInt({ + ...this._def, + checks: [ + ...this._def.checks, + { + kind, + value, + inclusive, + message: errorUtil_1.errorUtil.toString(message), + }, + ], + }); + } + _addCheck(check) { + return new ZodBigInt({ + ...this._def, + checks: [...this._def.checks, check], + }); + } + positive(message) { + return this._addCheck({ + kind: "min", + value: BigInt(0), + inclusive: false, + message: errorUtil_1.errorUtil.toString(message), + }); + } + negative(message) { + return this._addCheck({ + kind: "max", + value: BigInt(0), + inclusive: false, + message: errorUtil_1.errorUtil.toString(message), + }); + } + nonpositive(message) { + return this._addCheck({ + kind: "max", + value: BigInt(0), + inclusive: true, + message: errorUtil_1.errorUtil.toString(message), + }); + } + nonnegative(message) { + return this._addCheck({ + kind: "min", + value: BigInt(0), + inclusive: true, + message: errorUtil_1.errorUtil.toString(message), + }); + } + multipleOf(value, message) { + return this._addCheck({ + kind: "multipleOf", + value, + message: errorUtil_1.errorUtil.toString(message), + }); + } + get minValue() { + let min = null; + for (const ch of this._def.checks) { + if (ch.kind === "min") { + if (min === null || ch.value > min) + min = ch.value; + } + } + return min; + } + get maxValue() { + let max = null; + for (const ch of this._def.checks) { + if (ch.kind === "max") { + if (max === null || ch.value < max) + max = ch.value; + } + } + return max; + } +} +exports.ZodBigInt = ZodBigInt; +ZodBigInt.create = (params) => { + var _a; + return new ZodBigInt({ + checks: [], + typeName: ZodFirstPartyTypeKind.ZodBigInt, + coerce: (_a = params === null || params === void 0 ? void 0 : params.coerce) !== null && _a !== void 0 ? 
_a : false, + ...processCreateParams(params), + }); +}; +class ZodBoolean extends ZodType { + _parse(input) { + if (this._def.coerce) { + input.data = Boolean(input.data); + } + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.boolean) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.boolean, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + return (0, parseUtil_1.OK)(input.data); + } +} +exports.ZodBoolean = ZodBoolean; +ZodBoolean.create = (params) => { + return new ZodBoolean({ + typeName: ZodFirstPartyTypeKind.ZodBoolean, + coerce: (params === null || params === void 0 ? void 0 : params.coerce) || false, + ...processCreateParams(params), + }); +}; +class ZodDate extends ZodType { + _parse(input) { + if (this._def.coerce) { + input.data = new Date(input.data); + } + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.date) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.date, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + if (isNaN(input.data.getTime())) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_date, + }); + return parseUtil_1.INVALID; + } + const status = new parseUtil_1.ParseStatus(); + let ctx = undefined; + for (const check of this._def.checks) { + if (check.kind === "min") { + if (input.data.getTime() < check.value) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_small, + message: check.message, + inclusive: true, + exact: false, + minimum: check.value, + type: "date", + }); + status.dirty(); + } + } + else if (check.kind === "max") { + if (input.data.getTime() > check.value) { + ctx = this._getOrReturnCtx(input, ctx); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_big, + message: check.message, + inclusive: true, + exact: false, + maximum: check.value, + type: "date", + }); + status.dirty(); + } + } + else { + util_1.util.assertNever(check); + } + } + return { + status: status.value, + value: new Date(input.data.getTime()), + }; + } + _addCheck(check) { + return new ZodDate({ + ...this._def, + checks: [...this._def.checks, check], + }); + } + min(minDate, message) { + return this._addCheck({ + kind: "min", + value: minDate.getTime(), + message: errorUtil_1.errorUtil.toString(message), + }); + } + max(maxDate, message) { + return this._addCheck({ + kind: "max", + value: maxDate.getTime(), + message: errorUtil_1.errorUtil.toString(message), + }); + } + get minDate() { + let min = null; + for (const ch of this._def.checks) { + if (ch.kind === "min") { + if (min === null || ch.value > min) + min = ch.value; + } + } + return min != null ? new Date(min) : null; + } + get maxDate() { + let max = null; + for (const ch of this._def.checks) { + if (ch.kind === "max") { + if (max === null || ch.value < max) + max = ch.value; + } + } + return max != null ? new Date(max) : null; + } +} +exports.ZodDate = ZodDate; +ZodDate.create = (params) => { + return new ZodDate({ + checks: [], + coerce: (params === null || params === void 0 ? 
void 0 : params.coerce) || false, + typeName: ZodFirstPartyTypeKind.ZodDate, + ...processCreateParams(params), + }); +}; +class ZodSymbol extends ZodType { + _parse(input) { + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.symbol) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.symbol, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + return (0, parseUtil_1.OK)(input.data); + } +} +exports.ZodSymbol = ZodSymbol; +ZodSymbol.create = (params) => { + return new ZodSymbol({ + typeName: ZodFirstPartyTypeKind.ZodSymbol, + ...processCreateParams(params), + }); +}; +class ZodUndefined extends ZodType { + _parse(input) { + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.undefined) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.undefined, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + return (0, parseUtil_1.OK)(input.data); + } +} +exports.ZodUndefined = ZodUndefined; +ZodUndefined.create = (params) => { + return new ZodUndefined({ + typeName: ZodFirstPartyTypeKind.ZodUndefined, + ...processCreateParams(params), + }); +}; +class ZodNull extends ZodType { + _parse(input) { + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.null) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.null, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + return (0, parseUtil_1.OK)(input.data); + } +} +exports.ZodNull = ZodNull; +ZodNull.create = (params) => { + return new ZodNull({ + typeName: ZodFirstPartyTypeKind.ZodNull, + ...processCreateParams(params), + }); +}; +class ZodAny extends ZodType { + constructor() { + super(...arguments); + // to prevent instances of other classes from extending ZodAny. this causes issues with catchall in ZodObject. 
+ this._any = true; + } + _parse(input) { + return (0, parseUtil_1.OK)(input.data); + } +} +exports.ZodAny = ZodAny; +ZodAny.create = (params) => { + return new ZodAny({ + typeName: ZodFirstPartyTypeKind.ZodAny, + ...processCreateParams(params), + }); +}; +class ZodUnknown extends ZodType { + constructor() { + super(...arguments); + // required + this._unknown = true; + } + _parse(input) { + return (0, parseUtil_1.OK)(input.data); + } +} +exports.ZodUnknown = ZodUnknown; +ZodUnknown.create = (params) => { + return new ZodUnknown({ + typeName: ZodFirstPartyTypeKind.ZodUnknown, + ...processCreateParams(params), + }); +}; +class ZodNever extends ZodType { + _parse(input) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.never, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } +} +exports.ZodNever = ZodNever; +ZodNever.create = (params) => { + return new ZodNever({ + typeName: ZodFirstPartyTypeKind.ZodNever, + ...processCreateParams(params), + }); +}; +class ZodVoid extends ZodType { + _parse(input) { + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.undefined) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.void, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + return (0, parseUtil_1.OK)(input.data); + } +} +exports.ZodVoid = ZodVoid; +ZodVoid.create = (params) => { + return new ZodVoid({ + typeName: ZodFirstPartyTypeKind.ZodVoid, + ...processCreateParams(params), + }); +}; +class ZodArray extends ZodType { + _parse(input) { + const { ctx, status } = this._processInputParams(input); + const def = this._def; + if (ctx.parsedType !== util_1.ZodParsedType.array) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.array, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + if (def.exactLength !== null) { + const tooBig = ctx.data.length > def.exactLength.value; + const tooSmall = ctx.data.length < def.exactLength.value; + if (tooBig || tooSmall) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: tooBig ? ZodError_1.ZodIssueCode.too_big : ZodError_1.ZodIssueCode.too_small, + minimum: (tooSmall ? def.exactLength.value : undefined), + maximum: (tooBig ? 
def.exactLength.value : undefined), + type: "array", + inclusive: true, + exact: true, + message: def.exactLength.message, + }); + status.dirty(); + } + } + if (def.minLength !== null) { + if (ctx.data.length < def.minLength.value) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_small, + minimum: def.minLength.value, + type: "array", + inclusive: true, + exact: false, + message: def.minLength.message, + }); + status.dirty(); + } + } + if (def.maxLength !== null) { + if (ctx.data.length > def.maxLength.value) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_big, + maximum: def.maxLength.value, + type: "array", + inclusive: true, + exact: false, + message: def.maxLength.message, + }); + status.dirty(); + } + } + if (ctx.common.async) { + return Promise.all([...ctx.data].map((item, i) => { + return def.type._parseAsync(new ParseInputLazyPath(ctx, item, ctx.path, i)); + })).then((result) => { + return parseUtil_1.ParseStatus.mergeArray(status, result); + }); + } + const result = [...ctx.data].map((item, i) => { + return def.type._parseSync(new ParseInputLazyPath(ctx, item, ctx.path, i)); + }); + return parseUtil_1.ParseStatus.mergeArray(status, result); + } + get element() { + return this._def.type; + } + min(minLength, message) { + return new ZodArray({ + ...this._def, + minLength: { value: minLength, message: errorUtil_1.errorUtil.toString(message) }, + }); + } + max(maxLength, message) { + return new ZodArray({ + ...this._def, + maxLength: { value: maxLength, message: errorUtil_1.errorUtil.toString(message) }, + }); + } + length(len, message) { + return new ZodArray({ + ...this._def, + exactLength: { value: len, message: errorUtil_1.errorUtil.toString(message) }, + }); + } + nonempty(message) { + return this.min(1, message); + } +} +exports.ZodArray = ZodArray; +ZodArray.create = (schema, params) => { + return new ZodArray({ + type: schema, + minLength: null, + maxLength: null, + exactLength: null, + typeName: ZodFirstPartyTypeKind.ZodArray, + ...processCreateParams(params), + }); +}; +function deepPartialify(schema) { + if (schema instanceof ZodObject) { + const newShape = {}; + for (const key in schema.shape) { + const fieldSchema = schema.shape[key]; + newShape[key] = ZodOptional.create(deepPartialify(fieldSchema)); + } + return new ZodObject({ + ...schema._def, + shape: () => newShape, + }); + } + else if (schema instanceof ZodArray) { + return new ZodArray({ + ...schema._def, + type: deepPartialify(schema.element), + }); + } + else if (schema instanceof ZodOptional) { + return ZodOptional.create(deepPartialify(schema.unwrap())); + } + else if (schema instanceof ZodNullable) { + return ZodNullable.create(deepPartialify(schema.unwrap())); + } + else if (schema instanceof ZodTuple) { + return ZodTuple.create(schema.items.map((item) => deepPartialify(item))); + } + else { + return schema; + } +} +class ZodObject extends ZodType { + constructor() { + super(...arguments); + this._cached = null; + /** + * @deprecated In most cases, this is no longer needed - unknown properties are now silently stripped. + * If you want to pass through unknown properties, use `.passthrough()` instead. + */ + this.nonstrict = this.passthrough; + // extend< + // Augmentation extends ZodRawShape, + // NewOutput extends util.flatten<{ + // [k in keyof Augmentation | keyof Output]: k extends keyof Augmentation + // ? Augmentation[k]["_output"] + // : k extends keyof Output + // ? 
Output[k] + // : never; + // }>, + // NewInput extends util.flatten<{ + // [k in keyof Augmentation | keyof Input]: k extends keyof Augmentation + // ? Augmentation[k]["_input"] + // : k extends keyof Input + // ? Input[k] + // : never; + // }> + // >( + // augmentation: Augmentation + // ): ZodObject< + // extendShape, + // UnknownKeys, + // Catchall, + // NewOutput, + // NewInput + // > { + // return new ZodObject({ + // ...this._def, + // shape: () => ({ + // ...this._def.shape(), + // ...augmentation, + // }), + // }) as any; + // } + /** + * @deprecated Use `.extend` instead + * */ + this.augment = this.extend; + } + _getCached() { + if (this._cached !== null) + return this._cached; + const shape = this._def.shape(); + const keys = util_1.util.objectKeys(shape); + return (this._cached = { shape, keys }); + } + _parse(input) { + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.object) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.object, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + const { status, ctx } = this._processInputParams(input); + const { shape, keys: shapeKeys } = this._getCached(); + const extraKeys = []; + if (!(this._def.catchall instanceof ZodNever && + this._def.unknownKeys === "strip")) { + for (const key in ctx.data) { + if (!shapeKeys.includes(key)) { + extraKeys.push(key); + } + } + } + const pairs = []; + for (const key of shapeKeys) { + const keyValidator = shape[key]; + const value = ctx.data[key]; + pairs.push({ + key: { status: "valid", value: key }, + value: keyValidator._parse(new ParseInputLazyPath(ctx, value, ctx.path, key)), + alwaysSet: key in ctx.data, + }); + } + if (this._def.catchall instanceof ZodNever) { + const unknownKeys = this._def.unknownKeys; + if (unknownKeys === "passthrough") { + for (const key of extraKeys) { + pairs.push({ + key: { status: "valid", value: key }, + value: { status: "valid", value: ctx.data[key] }, + }); + } + } + else if (unknownKeys === "strict") { + if (extraKeys.length > 0) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.unrecognized_keys, + keys: extraKeys, + }); + status.dirty(); + } + } + else if (unknownKeys === "strip") { + } + else { + throw new Error(`Internal ZodObject error: invalid unknownKeys value.`); + } + } + else { + // run catchall validation + const catchall = this._def.catchall; + for (const key of extraKeys) { + const value = ctx.data[key]; + pairs.push({ + key: { status: "valid", value: key }, + value: catchall._parse(new ParseInputLazyPath(ctx, value, ctx.path, key) //, ctx.child(key), value, getParsedType(value) + ), + alwaysSet: key in ctx.data, + }); + } + } + if (ctx.common.async) { + return Promise.resolve() + .then(async () => { + const syncPairs = []; + for (const pair of pairs) { + const key = await pair.key; + const value = await pair.value; + syncPairs.push({ + key, + value, + alwaysSet: pair.alwaysSet, + }); + } + return syncPairs; + }) + .then((syncPairs) => { + return parseUtil_1.ParseStatus.mergeObjectSync(status, syncPairs); + }); + } + else { + return parseUtil_1.ParseStatus.mergeObjectSync(status, pairs); + } + } + get shape() { + return this._def.shape(); + } + strict(message) { + errorUtil_1.errorUtil.errToObj; + return new ZodObject({ + ...this._def, + unknownKeys: "strict", + ...(message !== undefined + ? 
{ + errorMap: (issue, ctx) => { + var _a, _b, _c, _d; + const defaultError = (_c = (_b = (_a = this._def).errorMap) === null || _b === void 0 ? void 0 : _b.call(_a, issue, ctx).message) !== null && _c !== void 0 ? _c : ctx.defaultError; + if (issue.code === "unrecognized_keys") + return { + message: (_d = errorUtil_1.errorUtil.errToObj(message).message) !== null && _d !== void 0 ? _d : defaultError, + }; + return { + message: defaultError, + }; + }, + } + : {}), + }); + } + strip() { + return new ZodObject({ + ...this._def, + unknownKeys: "strip", + }); + } + passthrough() { + return new ZodObject({ + ...this._def, + unknownKeys: "passthrough", + }); + } + // const AugmentFactory = + // (def: Def) => + // ( + // augmentation: Augmentation + // ): ZodObject< + // extendShape, Augmentation>, + // Def["unknownKeys"], + // Def["catchall"] + // > => { + // return new ZodObject({ + // ...def, + // shape: () => ({ + // ...def.shape(), + // ...augmentation, + // }), + // }) as any; + // }; + extend(augmentation) { + return new ZodObject({ + ...this._def, + shape: () => ({ + ...this._def.shape(), + ...augmentation, + }), + }); + } + /** + * Prior to zod@1.0.12 there was a bug in the + * inferred type of merged objects. Please + * upgrade if you are experiencing issues. + */ + merge(merging) { + const merged = new ZodObject({ + unknownKeys: merging._def.unknownKeys, + catchall: merging._def.catchall, + shape: () => ({ + ...this._def.shape(), + ...merging._def.shape(), + }), + typeName: ZodFirstPartyTypeKind.ZodObject, + }); + return merged; + } + // merge< + // Incoming extends AnyZodObject, + // Augmentation extends Incoming["shape"], + // NewOutput extends { + // [k in keyof Augmentation | keyof Output]: k extends keyof Augmentation + // ? Augmentation[k]["_output"] + // : k extends keyof Output + // ? Output[k] + // : never; + // }, + // NewInput extends { + // [k in keyof Augmentation | keyof Input]: k extends keyof Augmentation + // ? Augmentation[k]["_input"] + // : k extends keyof Input + // ? 
Input[k] + // : never; + // } + // >( + // merging: Incoming + // ): ZodObject< + // extendShape>, + // Incoming["_def"]["unknownKeys"], + // Incoming["_def"]["catchall"], + // NewOutput, + // NewInput + // > { + // const merged: any = new ZodObject({ + // unknownKeys: merging._def.unknownKeys, + // catchall: merging._def.catchall, + // shape: () => + // objectUtil.mergeShapes(this._def.shape(), merging._def.shape()), + // typeName: ZodFirstPartyTypeKind.ZodObject, + // }) as any; + // return merged; + // } + setKey(key, schema) { + return this.augment({ [key]: schema }); + } + // merge( + // merging: Incoming + // ): //ZodObject = (merging) => { + // ZodObject< + // extendShape>, + // Incoming["_def"]["unknownKeys"], + // Incoming["_def"]["catchall"] + // > { + // // const mergedShape = objectUtil.mergeShapes( + // // this._def.shape(), + // // merging._def.shape() + // // ); + // const merged: any = new ZodObject({ + // unknownKeys: merging._def.unknownKeys, + // catchall: merging._def.catchall, + // shape: () => + // objectUtil.mergeShapes(this._def.shape(), merging._def.shape()), + // typeName: ZodFirstPartyTypeKind.ZodObject, + // }) as any; + // return merged; + // } + catchall(index) { + return new ZodObject({ + ...this._def, + catchall: index, + }); + } + pick(mask) { + const shape = {}; + util_1.util.objectKeys(mask).forEach((key) => { + if (mask[key] && this.shape[key]) { + shape[key] = this.shape[key]; + } + }); + return new ZodObject({ + ...this._def, + shape: () => shape, + }); + } + omit(mask) { + const shape = {}; + util_1.util.objectKeys(this.shape).forEach((key) => { + if (!mask[key]) { + shape[key] = this.shape[key]; + } + }); + return new ZodObject({ + ...this._def, + shape: () => shape, + }); + } + /** + * @deprecated + */ + deepPartial() { + return deepPartialify(this); + } + partial(mask) { + const newShape = {}; + util_1.util.objectKeys(this.shape).forEach((key) => { + const fieldSchema = this.shape[key]; + if (mask && !mask[key]) { + newShape[key] = fieldSchema; + } + else { + newShape[key] = fieldSchema.optional(); + } + }); + return new ZodObject({ + ...this._def, + shape: () => newShape, + }); + } + required(mask) { + const newShape = {}; + util_1.util.objectKeys(this.shape).forEach((key) => { + if (mask && !mask[key]) { + newShape[key] = this.shape[key]; + } + else { + const fieldSchema = this.shape[key]; + let newField = fieldSchema; + while (newField instanceof ZodOptional) { + newField = newField._def.innerType; + } + newShape[key] = newField; + } + }); + return new ZodObject({ + ...this._def, + shape: () => newShape, + }); + } + keyof() { + return createZodEnum(util_1.util.objectKeys(this.shape)); + } +} +exports.ZodObject = ZodObject; +ZodObject.create = (shape, params) => { + return new ZodObject({ + shape: () => shape, + unknownKeys: "strip", + catchall: ZodNever.create(), + typeName: ZodFirstPartyTypeKind.ZodObject, + ...processCreateParams(params), + }); +}; +ZodObject.strictCreate = (shape, params) => { + return new ZodObject({ + shape: () => shape, + unknownKeys: "strict", + catchall: ZodNever.create(), + typeName: ZodFirstPartyTypeKind.ZodObject, + ...processCreateParams(params), + }); +}; +ZodObject.lazycreate = (shape, params) => { + return new ZodObject({ + shape, + unknownKeys: "strip", + catchall: ZodNever.create(), + typeName: ZodFirstPartyTypeKind.ZodObject, + ...processCreateParams(params), + }); +}; +class ZodUnion extends ZodType { + _parse(input) { + const { ctx } = this._processInputParams(input); + const options = this._def.options; + 
function handleResults(results) { + // return first issue-free validation if it exists + for (const result of results) { + if (result.result.status === "valid") { + return result.result; + } + } + for (const result of results) { + if (result.result.status === "dirty") { + // add issues from dirty option + ctx.common.issues.push(...result.ctx.common.issues); + return result.result; + } + } + // return invalid + const unionErrors = results.map((result) => new ZodError_1.ZodError(result.ctx.common.issues)); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_union, + unionErrors, + }); + return parseUtil_1.INVALID; + } + if (ctx.common.async) { + return Promise.all(options.map(async (option) => { + const childCtx = { + ...ctx, + common: { + ...ctx.common, + issues: [], + }, + parent: null, + }; + return { + result: await option._parseAsync({ + data: ctx.data, + path: ctx.path, + parent: childCtx, + }), + ctx: childCtx, + }; + })).then(handleResults); + } + else { + let dirty = undefined; + const issues = []; + for (const option of options) { + const childCtx = { + ...ctx, + common: { + ...ctx.common, + issues: [], + }, + parent: null, + }; + const result = option._parseSync({ + data: ctx.data, + path: ctx.path, + parent: childCtx, + }); + if (result.status === "valid") { + return result; + } + else if (result.status === "dirty" && !dirty) { + dirty = { result, ctx: childCtx }; + } + if (childCtx.common.issues.length) { + issues.push(childCtx.common.issues); + } + } + if (dirty) { + ctx.common.issues.push(...dirty.ctx.common.issues); + return dirty.result; + } + const unionErrors = issues.map((issues) => new ZodError_1.ZodError(issues)); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_union, + unionErrors, + }); + return parseUtil_1.INVALID; + } + } + get options() { + return this._def.options; + } +} +exports.ZodUnion = ZodUnion; +ZodUnion.create = (types, params) => { + return new ZodUnion({ + options: types, + typeName: ZodFirstPartyTypeKind.ZodUnion, + ...processCreateParams(params), + }); +}; +///////////////////////////////////////////////////// +///////////////////////////////////////////////////// +////////// ////////// +////////// ZodDiscriminatedUnion ////////// +////////// ////////// +///////////////////////////////////////////////////// +///////////////////////////////////////////////////// +const getDiscriminator = (type) => { + if (type instanceof ZodLazy) { + return getDiscriminator(type.schema); + } + else if (type instanceof ZodEffects) { + return getDiscriminator(type.innerType()); + } + else if (type instanceof ZodLiteral) { + return [type.value]; + } + else if (type instanceof ZodEnum) { + return type.options; + } + else if (type instanceof ZodNativeEnum) { + // eslint-disable-next-line ban/ban + return util_1.util.objectValues(type.enum); + } + else if (type instanceof ZodDefault) { + return getDiscriminator(type._def.innerType); + } + else if (type instanceof ZodUndefined) { + return [undefined]; + } + else if (type instanceof ZodNull) { + return [null]; + } + else if (type instanceof ZodOptional) { + return [undefined, ...getDiscriminator(type.unwrap())]; + } + else if (type instanceof ZodNullable) { + return [null, ...getDiscriminator(type.unwrap())]; + } + else if (type instanceof ZodBranded) { + return getDiscriminator(type.unwrap()); + } + else if (type instanceof ZodReadonly) { + return getDiscriminator(type.unwrap()); + } + else if (type instanceof ZodCatch) { + return 
getDiscriminator(type._def.innerType); + } + else { + return []; + } +}; +class ZodDiscriminatedUnion extends ZodType { + _parse(input) { + const { ctx } = this._processInputParams(input); + if (ctx.parsedType !== util_1.ZodParsedType.object) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.object, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + const discriminator = this.discriminator; + const discriminatorValue = ctx.data[discriminator]; + const option = this.optionsMap.get(discriminatorValue); + if (!option) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_union_discriminator, + options: Array.from(this.optionsMap.keys()), + path: [discriminator], + }); + return parseUtil_1.INVALID; + } + if (ctx.common.async) { + return option._parseAsync({ + data: ctx.data, + path: ctx.path, + parent: ctx, + }); + } + else { + return option._parseSync({ + data: ctx.data, + path: ctx.path, + parent: ctx, + }); + } + } + get discriminator() { + return this._def.discriminator; + } + get options() { + return this._def.options; + } + get optionsMap() { + return this._def.optionsMap; + } + /** + * The constructor of the discriminated union schema. Its behaviour is very similar to that of the normal z.union() constructor. + * However, it only allows a union of objects, all of which need to share a discriminator property. This property must + * have a different value for each object in the union. + * @param discriminator the name of the discriminator property + * @param types an array of object schemas + * @param params + */ + static create(discriminator, options, params) { + // Get all the valid discriminator values + const optionsMap = new Map(); + // try { + for (const type of options) { + const discriminatorValues = getDiscriminator(type.shape[discriminator]); + if (!discriminatorValues.length) { + throw new Error(`A discriminator value for key \`${discriminator}\` could not be extracted from all schema options`); + } + for (const value of discriminatorValues) { + if (optionsMap.has(value)) { + throw new Error(`Discriminator property ${String(discriminator)} has duplicate value ${String(value)}`); + } + optionsMap.set(value, type); + } + } + return new ZodDiscriminatedUnion({ + typeName: ZodFirstPartyTypeKind.ZodDiscriminatedUnion, + discriminator, + options, + optionsMap, + ...processCreateParams(params), + }); + } +} +exports.ZodDiscriminatedUnion = ZodDiscriminatedUnion; +function mergeValues(a, b) { + const aType = (0, util_1.getParsedType)(a); + const bType = (0, util_1.getParsedType)(b); + if (a === b) { + return { valid: true, data: a }; + } + else if (aType === util_1.ZodParsedType.object && bType === util_1.ZodParsedType.object) { + const bKeys = util_1.util.objectKeys(b); + const sharedKeys = util_1.util + .objectKeys(a) + .filter((key) => bKeys.indexOf(key) !== -1); + const newObj = { ...a, ...b }; + for (const key of sharedKeys) { + const sharedValue = mergeValues(a[key], b[key]); + if (!sharedValue.valid) { + return { valid: false }; + } + newObj[key] = sharedValue.data; + } + return { valid: true, data: newObj }; + } + else if (aType === util_1.ZodParsedType.array && bType === util_1.ZodParsedType.array) { + if (a.length !== b.length) { + return { valid: false }; + } + const newArray = []; + for (let index = 0; index < a.length; index++) { + const itemA = a[index]; + const itemB = b[index]; + const sharedValue = mergeValues(itemA, itemB); + if 
(!sharedValue.valid) { + return { valid: false }; + } + newArray.push(sharedValue.data); + } + return { valid: true, data: newArray }; + } + else if (aType === util_1.ZodParsedType.date && + bType === util_1.ZodParsedType.date && + +a === +b) { + return { valid: true, data: a }; + } + else { + return { valid: false }; + } +} +class ZodIntersection extends ZodType { + _parse(input) { + const { status, ctx } = this._processInputParams(input); + const handleParsed = (parsedLeft, parsedRight) => { + if ((0, parseUtil_1.isAborted)(parsedLeft) || (0, parseUtil_1.isAborted)(parsedRight)) { + return parseUtil_1.INVALID; + } + const merged = mergeValues(parsedLeft.value, parsedRight.value); + if (!merged.valid) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_intersection_types, + }); + return parseUtil_1.INVALID; + } + if ((0, parseUtil_1.isDirty)(parsedLeft) || (0, parseUtil_1.isDirty)(parsedRight)) { + status.dirty(); + } + return { status: status.value, value: merged.data }; + }; + if (ctx.common.async) { + return Promise.all([ + this._def.left._parseAsync({ + data: ctx.data, + path: ctx.path, + parent: ctx, + }), + this._def.right._parseAsync({ + data: ctx.data, + path: ctx.path, + parent: ctx, + }), + ]).then(([left, right]) => handleParsed(left, right)); + } + else { + return handleParsed(this._def.left._parseSync({ + data: ctx.data, + path: ctx.path, + parent: ctx, + }), this._def.right._parseSync({ + data: ctx.data, + path: ctx.path, + parent: ctx, + })); + } + } +} +exports.ZodIntersection = ZodIntersection; +ZodIntersection.create = (left, right, params) => { + return new ZodIntersection({ + left: left, + right: right, + typeName: ZodFirstPartyTypeKind.ZodIntersection, + ...processCreateParams(params), + }); +}; +class ZodTuple extends ZodType { + _parse(input) { + const { status, ctx } = this._processInputParams(input); + if (ctx.parsedType !== util_1.ZodParsedType.array) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.array, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + if (ctx.data.length < this._def.items.length) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_small, + minimum: this._def.items.length, + inclusive: true, + exact: false, + type: "array", + }); + return parseUtil_1.INVALID; + } + const rest = this._def.rest; + if (!rest && ctx.data.length > this._def.items.length) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_big, + maximum: this._def.items.length, + inclusive: true, + exact: false, + type: "array", + }); + status.dirty(); + } + const items = [...ctx.data] + .map((item, itemIndex) => { + const schema = this._def.items[itemIndex] || this._def.rest; + if (!schema) + return null; + return schema._parse(new ParseInputLazyPath(ctx, item, ctx.path, itemIndex)); + }) + .filter((x) => !!x); // filter nulls + if (ctx.common.async) { + return Promise.all(items).then((results) => { + return parseUtil_1.ParseStatus.mergeArray(status, results); + }); + } + else { + return parseUtil_1.ParseStatus.mergeArray(status, items); + } + } + get items() { + return this._def.items; + } + rest(rest) { + return new ZodTuple({ + ...this._def, + rest, + }); + } +} +exports.ZodTuple = ZodTuple; +ZodTuple.create = (schemas, params) => { + if (!Array.isArray(schemas)) { + throw new Error("You must pass an array of schemas to z.tuple([ ... 
])"); + } + return new ZodTuple({ + items: schemas, + typeName: ZodFirstPartyTypeKind.ZodTuple, + rest: null, + ...processCreateParams(params), + }); +}; +class ZodRecord extends ZodType { + get keySchema() { + return this._def.keyType; + } + get valueSchema() { + return this._def.valueType; + } + _parse(input) { + const { status, ctx } = this._processInputParams(input); + if (ctx.parsedType !== util_1.ZodParsedType.object) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.object, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + const pairs = []; + const keyType = this._def.keyType; + const valueType = this._def.valueType; + for (const key in ctx.data) { + pairs.push({ + key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, key)), + value: valueType._parse(new ParseInputLazyPath(ctx, ctx.data[key], ctx.path, key)), + alwaysSet: key in ctx.data, + }); + } + if (ctx.common.async) { + return parseUtil_1.ParseStatus.mergeObjectAsync(status, pairs); + } + else { + return parseUtil_1.ParseStatus.mergeObjectSync(status, pairs); + } + } + get element() { + return this._def.valueType; + } + static create(first, second, third) { + if (second instanceof ZodType) { + return new ZodRecord({ + keyType: first, + valueType: second, + typeName: ZodFirstPartyTypeKind.ZodRecord, + ...processCreateParams(third), + }); + } + return new ZodRecord({ + keyType: ZodString.create(), + valueType: first, + typeName: ZodFirstPartyTypeKind.ZodRecord, + ...processCreateParams(second), + }); + } +} +exports.ZodRecord = ZodRecord; +class ZodMap extends ZodType { + get keySchema() { + return this._def.keyType; + } + get valueSchema() { + return this._def.valueType; + } + _parse(input) { + const { status, ctx } = this._processInputParams(input); + if (ctx.parsedType !== util_1.ZodParsedType.map) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.map, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + const keyType = this._def.keyType; + const valueType = this._def.valueType; + const pairs = [...ctx.data.entries()].map(([key, value], index) => { + return { + key: keyType._parse(new ParseInputLazyPath(ctx, key, ctx.path, [index, "key"])), + value: valueType._parse(new ParseInputLazyPath(ctx, value, ctx.path, [index, "value"])), + }; + }); + if (ctx.common.async) { + const finalMap = new Map(); + return Promise.resolve().then(async () => { + for (const pair of pairs) { + const key = await pair.key; + const value = await pair.value; + if (key.status === "aborted" || value.status === "aborted") { + return parseUtil_1.INVALID; + } + if (key.status === "dirty" || value.status === "dirty") { + status.dirty(); + } + finalMap.set(key.value, value.value); + } + return { status: status.value, value: finalMap }; + }); + } + else { + const finalMap = new Map(); + for (const pair of pairs) { + const key = pair.key; + const value = pair.value; + if (key.status === "aborted" || value.status === "aborted") { + return parseUtil_1.INVALID; + } + if (key.status === "dirty" || value.status === "dirty") { + status.dirty(); + } + finalMap.set(key.value, value.value); + } + return { status: status.value, value: finalMap }; + } + } +} +exports.ZodMap = ZodMap; +ZodMap.create = (keyType, valueType, params) => { + return new ZodMap({ + valueType, + keyType, + typeName: ZodFirstPartyTypeKind.ZodMap, + ...processCreateParams(params), + }); +}; +class ZodSet 
extends ZodType { + _parse(input) { + const { status, ctx } = this._processInputParams(input); + if (ctx.parsedType !== util_1.ZodParsedType.set) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.set, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + const def = this._def; + if (def.minSize !== null) { + if (ctx.data.size < def.minSize.value) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_small, + minimum: def.minSize.value, + type: "set", + inclusive: true, + exact: false, + message: def.minSize.message, + }); + status.dirty(); + } + } + if (def.maxSize !== null) { + if (ctx.data.size > def.maxSize.value) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.too_big, + maximum: def.maxSize.value, + type: "set", + inclusive: true, + exact: false, + message: def.maxSize.message, + }); + status.dirty(); + } + } + const valueType = this._def.valueType; + function finalizeSet(elements) { + const parsedSet = new Set(); + for (const element of elements) { + if (element.status === "aborted") + return parseUtil_1.INVALID; + if (element.status === "dirty") + status.dirty(); + parsedSet.add(element.value); + } + return { status: status.value, value: parsedSet }; + } + const elements = [...ctx.data.values()].map((item, i) => valueType._parse(new ParseInputLazyPath(ctx, item, ctx.path, i))); + if (ctx.common.async) { + return Promise.all(elements).then((elements) => finalizeSet(elements)); + } + else { + return finalizeSet(elements); + } + } + min(minSize, message) { + return new ZodSet({ + ...this._def, + minSize: { value: minSize, message: errorUtil_1.errorUtil.toString(message) }, + }); + } + max(maxSize, message) { + return new ZodSet({ + ...this._def, + maxSize: { value: maxSize, message: errorUtil_1.errorUtil.toString(message) }, + }); + } + size(size, message) { + return this.min(size, message).max(size, message); + } + nonempty(message) { + return this.min(1, message); + } +} +exports.ZodSet = ZodSet; +ZodSet.create = (valueType, params) => { + return new ZodSet({ + valueType, + minSize: null, + maxSize: null, + typeName: ZodFirstPartyTypeKind.ZodSet, + ...processCreateParams(params), + }); +}; +class ZodFunction extends ZodType { + constructor() { + super(...arguments); + this.validate = this.implement; + } + _parse(input) { + const { ctx } = this._processInputParams(input); + if (ctx.parsedType !== util_1.ZodParsedType.function) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.function, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + function makeArgsIssue(args, error) { + return (0, parseUtil_1.makeIssue)({ + data: args, + path: ctx.path, + errorMaps: [ + ctx.common.contextualErrorMap, + ctx.schemaErrorMap, + (0, errors_1.getErrorMap)(), + errors_1.defaultErrorMap, + ].filter((x) => !!x), + issueData: { + code: ZodError_1.ZodIssueCode.invalid_arguments, + argumentsError: error, + }, + }); + } + function makeReturnsIssue(returns, error) { + return (0, parseUtil_1.makeIssue)({ + data: returns, + path: ctx.path, + errorMaps: [ + ctx.common.contextualErrorMap, + ctx.schemaErrorMap, + (0, errors_1.getErrorMap)(), + errors_1.defaultErrorMap, + ].filter((x) => !!x), + issueData: { + code: ZodError_1.ZodIssueCode.invalid_return_type, + returnTypeError: error, + }, + }); + } + const params = { errorMap: ctx.common.contextualErrorMap }; + const fn = 
ctx.data; + if (this._def.returns instanceof ZodPromise) { + // Would love a way to avoid disabling this rule, but we need + // an alias (using an arrow function was what caused 2651). + // eslint-disable-next-line @typescript-eslint/no-this-alias + const me = this; + return (0, parseUtil_1.OK)(async function (...args) { + const error = new ZodError_1.ZodError([]); + const parsedArgs = await me._def.args + .parseAsync(args, params) + .catch((e) => { + error.addIssue(makeArgsIssue(args, e)); + throw error; + }); + const result = await Reflect.apply(fn, this, parsedArgs); + const parsedReturns = await me._def.returns._def.type + .parseAsync(result, params) + .catch((e) => { + error.addIssue(makeReturnsIssue(result, e)); + throw error; + }); + return parsedReturns; + }); + } + else { + // Would love a way to avoid disabling this rule, but we need + // an alias (using an arrow function was what caused 2651). + // eslint-disable-next-line @typescript-eslint/no-this-alias + const me = this; + return (0, parseUtil_1.OK)(function (...args) { + const parsedArgs = me._def.args.safeParse(args, params); + if (!parsedArgs.success) { + throw new ZodError_1.ZodError([makeArgsIssue(args, parsedArgs.error)]); + } + const result = Reflect.apply(fn, this, parsedArgs.data); + const parsedReturns = me._def.returns.safeParse(result, params); + if (!parsedReturns.success) { + throw new ZodError_1.ZodError([makeReturnsIssue(result, parsedReturns.error)]); + } + return parsedReturns.data; + }); + } + } + parameters() { + return this._def.args; + } + returnType() { + return this._def.returns; + } + args(...items) { + return new ZodFunction({ + ...this._def, + args: ZodTuple.create(items).rest(ZodUnknown.create()), + }); + } + returns(returnType) { + return new ZodFunction({ + ...this._def, + returns: returnType, + }); + } + implement(func) { + const validatedFunc = this.parse(func); + return validatedFunc; + } + strictImplement(func) { + const validatedFunc = this.parse(func); + return validatedFunc; + } + static create(args, returns, params) { + return new ZodFunction({ + args: (args + ? 
args + : ZodTuple.create([]).rest(ZodUnknown.create())), + returns: returns || ZodUnknown.create(), + typeName: ZodFirstPartyTypeKind.ZodFunction, + ...processCreateParams(params), + }); + } +} +exports.ZodFunction = ZodFunction; +class ZodLazy extends ZodType { + get schema() { + return this._def.getter(); + } + _parse(input) { + const { ctx } = this._processInputParams(input); + const lazySchema = this._def.getter(); + return lazySchema._parse({ data: ctx.data, path: ctx.path, parent: ctx }); + } +} +exports.ZodLazy = ZodLazy; +ZodLazy.create = (getter, params) => { + return new ZodLazy({ + getter: getter, + typeName: ZodFirstPartyTypeKind.ZodLazy, + ...processCreateParams(params), + }); +}; +class ZodLiteral extends ZodType { + _parse(input) { + if (input.data !== this._def.value) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + received: ctx.data, + code: ZodError_1.ZodIssueCode.invalid_literal, + expected: this._def.value, + }); + return parseUtil_1.INVALID; + } + return { status: "valid", value: input.data }; + } + get value() { + return this._def.value; + } +} +exports.ZodLiteral = ZodLiteral; +ZodLiteral.create = (value, params) => { + return new ZodLiteral({ + value: value, + typeName: ZodFirstPartyTypeKind.ZodLiteral, + ...processCreateParams(params), + }); +}; +function createZodEnum(values, params) { + return new ZodEnum({ + values, + typeName: ZodFirstPartyTypeKind.ZodEnum, + ...processCreateParams(params), + }); +} +class ZodEnum extends ZodType { + constructor() { + super(...arguments); + _ZodEnum_cache.set(this, void 0); + } + _parse(input) { + if (typeof input.data !== "string") { + const ctx = this._getOrReturnCtx(input); + const expectedValues = this._def.values; + (0, parseUtil_1.addIssueToContext)(ctx, { + expected: util_1.util.joinValues(expectedValues), + received: ctx.parsedType, + code: ZodError_1.ZodIssueCode.invalid_type, + }); + return parseUtil_1.INVALID; + } + if (!__classPrivateFieldGet(this, _ZodEnum_cache, "f")) { + __classPrivateFieldSet(this, _ZodEnum_cache, new Set(this._def.values), "f"); + } + if (!__classPrivateFieldGet(this, _ZodEnum_cache, "f").has(input.data)) { + const ctx = this._getOrReturnCtx(input); + const expectedValues = this._def.values; + (0, parseUtil_1.addIssueToContext)(ctx, { + received: ctx.data, + code: ZodError_1.ZodIssueCode.invalid_enum_value, + options: expectedValues, + }); + return parseUtil_1.INVALID; + } + return (0, parseUtil_1.OK)(input.data); + } + get options() { + return this._def.values; + } + get enum() { + const enumValues = {}; + for (const val of this._def.values) { + enumValues[val] = val; + } + return enumValues; + } + get Values() { + const enumValues = {}; + for (const val of this._def.values) { + enumValues[val] = val; + } + return enumValues; + } + get Enum() { + const enumValues = {}; + for (const val of this._def.values) { + enumValues[val] = val; + } + return enumValues; + } + extract(values, newDef = this._def) { + return ZodEnum.create(values, { + ...this._def, + ...newDef, + }); + } + exclude(values, newDef = this._def) { + return ZodEnum.create(this.options.filter((opt) => !values.includes(opt)), { + ...this._def, + ...newDef, + }); + } +} +exports.ZodEnum = ZodEnum; +_ZodEnum_cache = new WeakMap(); +ZodEnum.create = createZodEnum; +class ZodNativeEnum extends ZodType { + constructor() { + super(...arguments); + _ZodNativeEnum_cache.set(this, void 0); + } + _parse(input) { + const nativeEnumValues = util_1.util.getValidEnumValues(this._def.values); + const ctx 
= this._getOrReturnCtx(input); + if (ctx.parsedType !== util_1.ZodParsedType.string && + ctx.parsedType !== util_1.ZodParsedType.number) { + const expectedValues = util_1.util.objectValues(nativeEnumValues); + (0, parseUtil_1.addIssueToContext)(ctx, { + expected: util_1.util.joinValues(expectedValues), + received: ctx.parsedType, + code: ZodError_1.ZodIssueCode.invalid_type, + }); + return parseUtil_1.INVALID; + } + if (!__classPrivateFieldGet(this, _ZodNativeEnum_cache, "f")) { + __classPrivateFieldSet(this, _ZodNativeEnum_cache, new Set(util_1.util.getValidEnumValues(this._def.values)), "f"); + } + if (!__classPrivateFieldGet(this, _ZodNativeEnum_cache, "f").has(input.data)) { + const expectedValues = util_1.util.objectValues(nativeEnumValues); + (0, parseUtil_1.addIssueToContext)(ctx, { + received: ctx.data, + code: ZodError_1.ZodIssueCode.invalid_enum_value, + options: expectedValues, + }); + return parseUtil_1.INVALID; + } + return (0, parseUtil_1.OK)(input.data); + } + get enum() { + return this._def.values; + } +} +exports.ZodNativeEnum = ZodNativeEnum; +_ZodNativeEnum_cache = new WeakMap(); +ZodNativeEnum.create = (values, params) => { + return new ZodNativeEnum({ + values: values, + typeName: ZodFirstPartyTypeKind.ZodNativeEnum, + ...processCreateParams(params), + }); +}; +class ZodPromise extends ZodType { + unwrap() { + return this._def.type; + } + _parse(input) { + const { ctx } = this._processInputParams(input); + if (ctx.parsedType !== util_1.ZodParsedType.promise && + ctx.common.async === false) { + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.promise, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + const promisified = ctx.parsedType === util_1.ZodParsedType.promise + ? ctx.data + : Promise.resolve(ctx.data); + return (0, parseUtil_1.OK)(promisified.then((data) => { + return this._def.type.parseAsync(data, { + path: ctx.path, + errorMap: ctx.common.contextualErrorMap, + }); + })); + } +} +exports.ZodPromise = ZodPromise; +ZodPromise.create = (schema, params) => { + return new ZodPromise({ + type: schema, + typeName: ZodFirstPartyTypeKind.ZodPromise, + ...processCreateParams(params), + }); +}; +class ZodEffects extends ZodType { + innerType() { + return this._def.schema; + } + sourceType() { + return this._def.schema._def.typeName === ZodFirstPartyTypeKind.ZodEffects + ? 
this._def.schema.sourceType() + : this._def.schema; + } + _parse(input) { + const { status, ctx } = this._processInputParams(input); + const effect = this._def.effect || null; + const checkCtx = { + addIssue: (arg) => { + (0, parseUtil_1.addIssueToContext)(ctx, arg); + if (arg.fatal) { + status.abort(); + } + else { + status.dirty(); + } + }, + get path() { + return ctx.path; + }, + }; + checkCtx.addIssue = checkCtx.addIssue.bind(checkCtx); + if (effect.type === "preprocess") { + const processed = effect.transform(ctx.data, checkCtx); + if (ctx.common.async) { + return Promise.resolve(processed).then(async (processed) => { + if (status.value === "aborted") + return parseUtil_1.INVALID; + const result = await this._def.schema._parseAsync({ + data: processed, + path: ctx.path, + parent: ctx, + }); + if (result.status === "aborted") + return parseUtil_1.INVALID; + if (result.status === "dirty") + return (0, parseUtil_1.DIRTY)(result.value); + if (status.value === "dirty") + return (0, parseUtil_1.DIRTY)(result.value); + return result; + }); + } + else { + if (status.value === "aborted") + return parseUtil_1.INVALID; + const result = this._def.schema._parseSync({ + data: processed, + path: ctx.path, + parent: ctx, + }); + if (result.status === "aborted") + return parseUtil_1.INVALID; + if (result.status === "dirty") + return (0, parseUtil_1.DIRTY)(result.value); + if (status.value === "dirty") + return (0, parseUtil_1.DIRTY)(result.value); + return result; + } + } + if (effect.type === "refinement") { + const executeRefinement = (acc) => { + const result = effect.refinement(acc, checkCtx); + if (ctx.common.async) { + return Promise.resolve(result); + } + if (result instanceof Promise) { + throw new Error("Async refinement encountered during synchronous parse operation. Use .parseAsync instead."); + } + return acc; + }; + if (ctx.common.async === false) { + const inner = this._def.schema._parseSync({ + data: ctx.data, + path: ctx.path, + parent: ctx, + }); + if (inner.status === "aborted") + return parseUtil_1.INVALID; + if (inner.status === "dirty") + status.dirty(); + // return value is ignored + executeRefinement(inner.value); + return { status: status.value, value: inner.value }; + } + else { + return this._def.schema + ._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx }) + .then((inner) => { + if (inner.status === "aborted") + return parseUtil_1.INVALID; + if (inner.status === "dirty") + status.dirty(); + return executeRefinement(inner.value).then(() => { + return { status: status.value, value: inner.value }; + }); + }); + } + } + if (effect.type === "transform") { + if (ctx.common.async === false) { + const base = this._def.schema._parseSync({ + data: ctx.data, + path: ctx.path, + parent: ctx, + }); + if (!(0, parseUtil_1.isValid)(base)) + return base; + const result = effect.transform(base.value, checkCtx); + if (result instanceof Promise) { + throw new Error(`Asynchronous transform encountered during synchronous parse operation. 
Use .parseAsync instead.`); + } + return { status: status.value, value: result }; + } + else { + return this._def.schema + ._parseAsync({ data: ctx.data, path: ctx.path, parent: ctx }) + .then((base) => { + if (!(0, parseUtil_1.isValid)(base)) + return base; + return Promise.resolve(effect.transform(base.value, checkCtx)).then((result) => ({ status: status.value, value: result })); + }); + } + } + util_1.util.assertNever(effect); + } +} +exports.ZodEffects = ZodEffects; +exports.ZodTransformer = ZodEffects; +ZodEffects.create = (schema, effect, params) => { + return new ZodEffects({ + schema, + typeName: ZodFirstPartyTypeKind.ZodEffects, + effect, + ...processCreateParams(params), + }); +}; +ZodEffects.createWithPreprocess = (preprocess, schema, params) => { + return new ZodEffects({ + schema, + effect: { type: "preprocess", transform: preprocess }, + typeName: ZodFirstPartyTypeKind.ZodEffects, + ...processCreateParams(params), + }); +}; +class ZodOptional extends ZodType { + _parse(input) { + const parsedType = this._getType(input); + if (parsedType === util_1.ZodParsedType.undefined) { + return (0, parseUtil_1.OK)(undefined); + } + return this._def.innerType._parse(input); + } + unwrap() { + return this._def.innerType; + } +} +exports.ZodOptional = ZodOptional; +ZodOptional.create = (type, params) => { + return new ZodOptional({ + innerType: type, + typeName: ZodFirstPartyTypeKind.ZodOptional, + ...processCreateParams(params), + }); +}; +class ZodNullable extends ZodType { + _parse(input) { + const parsedType = this._getType(input); + if (parsedType === util_1.ZodParsedType.null) { + return (0, parseUtil_1.OK)(null); + } + return this._def.innerType._parse(input); + } + unwrap() { + return this._def.innerType; + } +} +exports.ZodNullable = ZodNullable; +ZodNullable.create = (type, params) => { + return new ZodNullable({ + innerType: type, + typeName: ZodFirstPartyTypeKind.ZodNullable, + ...processCreateParams(params), + }); +}; +class ZodDefault extends ZodType { + _parse(input) { + const { ctx } = this._processInputParams(input); + let data = ctx.data; + if (ctx.parsedType === util_1.ZodParsedType.undefined) { + data = this._def.defaultValue(); + } + return this._def.innerType._parse({ + data, + path: ctx.path, + parent: ctx, + }); + } + removeDefault() { + return this._def.innerType; + } +} +exports.ZodDefault = ZodDefault; +ZodDefault.create = (type, params) => { + return new ZodDefault({ + innerType: type, + typeName: ZodFirstPartyTypeKind.ZodDefault, + defaultValue: typeof params.default === "function" + ? params.default + : () => params.default, + ...processCreateParams(params), + }); +}; +class ZodCatch extends ZodType { + _parse(input) { + const { ctx } = this._processInputParams(input); + // newCtx is used to not collect issues from inner types in ctx + const newCtx = { + ...ctx, + common: { + ...ctx.common, + issues: [], + }, + }; + const result = this._def.innerType._parse({ + data: newCtx.data, + path: newCtx.path, + parent: { + ...newCtx, + }, + }); + if ((0, parseUtil_1.isAsync)(result)) { + return result.then((result) => { + return { + status: "valid", + value: result.status === "valid" + ? result.value + : this._def.catchValue({ + get error() { + return new ZodError_1.ZodError(newCtx.common.issues); + }, + input: newCtx.data, + }), + }; + }); + } + else { + return { + status: "valid", + value: result.status === "valid" + ? 
result.value + : this._def.catchValue({ + get error() { + return new ZodError_1.ZodError(newCtx.common.issues); + }, + input: newCtx.data, + }), + }; + } + } + removeCatch() { + return this._def.innerType; + } +} +exports.ZodCatch = ZodCatch; +ZodCatch.create = (type, params) => { + return new ZodCatch({ + innerType: type, + typeName: ZodFirstPartyTypeKind.ZodCatch, + catchValue: typeof params.catch === "function" ? params.catch : () => params.catch, + ...processCreateParams(params), + }); +}; +class ZodNaN extends ZodType { + _parse(input) { + const parsedType = this._getType(input); + if (parsedType !== util_1.ZodParsedType.nan) { + const ctx = this._getOrReturnCtx(input); + (0, parseUtil_1.addIssueToContext)(ctx, { + code: ZodError_1.ZodIssueCode.invalid_type, + expected: util_1.ZodParsedType.nan, + received: ctx.parsedType, + }); + return parseUtil_1.INVALID; + } + return { status: "valid", value: input.data }; + } +} +exports.ZodNaN = ZodNaN; +ZodNaN.create = (params) => { + return new ZodNaN({ + typeName: ZodFirstPartyTypeKind.ZodNaN, + ...processCreateParams(params), + }); +}; +exports.BRAND = Symbol("zod_brand"); +class ZodBranded extends ZodType { + _parse(input) { + const { ctx } = this._processInputParams(input); + const data = ctx.data; + return this._def.type._parse({ + data, + path: ctx.path, + parent: ctx, + }); + } + unwrap() { + return this._def.type; + } +} +exports.ZodBranded = ZodBranded; +class ZodPipeline extends ZodType { + _parse(input) { + const { status, ctx } = this._processInputParams(input); + if (ctx.common.async) { + const handleAsync = async () => { + const inResult = await this._def.in._parseAsync({ + data: ctx.data, + path: ctx.path, + parent: ctx, + }); + if (inResult.status === "aborted") + return parseUtil_1.INVALID; + if (inResult.status === "dirty") { + status.dirty(); + return (0, parseUtil_1.DIRTY)(inResult.value); + } + else { + return this._def.out._parseAsync({ + data: inResult.value, + path: ctx.path, + parent: ctx, + }); + } + }; + return handleAsync(); + } + else { + const inResult = this._def.in._parseSync({ + data: ctx.data, + path: ctx.path, + parent: ctx, + }); + if (inResult.status === "aborted") + return parseUtil_1.INVALID; + if (inResult.status === "dirty") { + status.dirty(); + return { + status: "dirty", + value: inResult.value, + }; + } + else { + return this._def.out._parseSync({ + data: inResult.value, + path: ctx.path, + parent: ctx, + }); + } + } + } + static create(a, b) { + return new ZodPipeline({ + in: a, + out: b, + typeName: ZodFirstPartyTypeKind.ZodPipeline, + }); + } +} +exports.ZodPipeline = ZodPipeline; +class ZodReadonly extends ZodType { + _parse(input) { + const result = this._def.innerType._parse(input); + const freeze = (data) => { + if ((0, parseUtil_1.isValid)(data)) { + data.value = Object.freeze(data.value); + } + return data; + }; + return (0, parseUtil_1.isAsync)(result) + ? 
result.then((data) => freeze(data)) + : freeze(result); + } + unwrap() { + return this._def.innerType; + } +} +exports.ZodReadonly = ZodReadonly; +ZodReadonly.create = (type, params) => { + return new ZodReadonly({ + innerType: type, + typeName: ZodFirstPartyTypeKind.ZodReadonly, + ...processCreateParams(params), + }); +}; +function custom(check, params = {}, +/** + * @deprecated + * + * Pass `fatal` into the params object instead: + * + * ```ts + * z.string().custom((val) => val.length > 5, { fatal: false }) + * ``` + * + */ +fatal) { + if (check) + return ZodAny.create().superRefine((data, ctx) => { + var _a, _b; + if (!check(data)) { + const p = typeof params === "function" + ? params(data) + : typeof params === "string" + ? { message: params } + : params; + const _fatal = (_b = (_a = p.fatal) !== null && _a !== void 0 ? _a : fatal) !== null && _b !== void 0 ? _b : true; + const p2 = typeof p === "string" ? { message: p } : p; + ctx.addIssue({ code: "custom", ...p2, fatal: _fatal }); + } + }); + return ZodAny.create(); +} +exports.custom = custom; +exports.late = { + object: ZodObject.lazycreate, +}; +var ZodFirstPartyTypeKind; +(function (ZodFirstPartyTypeKind) { + ZodFirstPartyTypeKind["ZodString"] = "ZodString"; + ZodFirstPartyTypeKind["ZodNumber"] = "ZodNumber"; + ZodFirstPartyTypeKind["ZodNaN"] = "ZodNaN"; + ZodFirstPartyTypeKind["ZodBigInt"] = "ZodBigInt"; + ZodFirstPartyTypeKind["ZodBoolean"] = "ZodBoolean"; + ZodFirstPartyTypeKind["ZodDate"] = "ZodDate"; + ZodFirstPartyTypeKind["ZodSymbol"] = "ZodSymbol"; + ZodFirstPartyTypeKind["ZodUndefined"] = "ZodUndefined"; + ZodFirstPartyTypeKind["ZodNull"] = "ZodNull"; + ZodFirstPartyTypeKind["ZodAny"] = "ZodAny"; + ZodFirstPartyTypeKind["ZodUnknown"] = "ZodUnknown"; + ZodFirstPartyTypeKind["ZodNever"] = "ZodNever"; + ZodFirstPartyTypeKind["ZodVoid"] = "ZodVoid"; + ZodFirstPartyTypeKind["ZodArray"] = "ZodArray"; + ZodFirstPartyTypeKind["ZodObject"] = "ZodObject"; + ZodFirstPartyTypeKind["ZodUnion"] = "ZodUnion"; + ZodFirstPartyTypeKind["ZodDiscriminatedUnion"] = "ZodDiscriminatedUnion"; + ZodFirstPartyTypeKind["ZodIntersection"] = "ZodIntersection"; + ZodFirstPartyTypeKind["ZodTuple"] = "ZodTuple"; + ZodFirstPartyTypeKind["ZodRecord"] = "ZodRecord"; + ZodFirstPartyTypeKind["ZodMap"] = "ZodMap"; + ZodFirstPartyTypeKind["ZodSet"] = "ZodSet"; + ZodFirstPartyTypeKind["ZodFunction"] = "ZodFunction"; + ZodFirstPartyTypeKind["ZodLazy"] = "ZodLazy"; + ZodFirstPartyTypeKind["ZodLiteral"] = "ZodLiteral"; + ZodFirstPartyTypeKind["ZodEnum"] = "ZodEnum"; + ZodFirstPartyTypeKind["ZodEffects"] = "ZodEffects"; + ZodFirstPartyTypeKind["ZodNativeEnum"] = "ZodNativeEnum"; + ZodFirstPartyTypeKind["ZodOptional"] = "ZodOptional"; + ZodFirstPartyTypeKind["ZodNullable"] = "ZodNullable"; + ZodFirstPartyTypeKind["ZodDefault"] = "ZodDefault"; + ZodFirstPartyTypeKind["ZodCatch"] = "ZodCatch"; + ZodFirstPartyTypeKind["ZodPromise"] = "ZodPromise"; + ZodFirstPartyTypeKind["ZodBranded"] = "ZodBranded"; + ZodFirstPartyTypeKind["ZodPipeline"] = "ZodPipeline"; + ZodFirstPartyTypeKind["ZodReadonly"] = "ZodReadonly"; +})(ZodFirstPartyTypeKind || (exports.ZodFirstPartyTypeKind = ZodFirstPartyTypeKind = {})); +// requires TS 4.4+ +class Class { + constructor(..._) { } +} +const instanceOfType = ( +// const instanceOfType = any>( +cls, params = { + message: `Input not instance of ${cls.name}`, +}) => custom((data) => data instanceof cls, params); +exports["instanceof"] = instanceOfType; +const stringType = ZodString.create; +exports.string = stringType; +const numberType = 
ZodNumber.create; +exports.number = numberType; +const nanType = ZodNaN.create; +exports.nan = nanType; +const bigIntType = ZodBigInt.create; +exports.bigint = bigIntType; +const booleanType = ZodBoolean.create; +exports.boolean = booleanType; +const dateType = ZodDate.create; +exports.date = dateType; +const symbolType = ZodSymbol.create; +exports.symbol = symbolType; +const undefinedType = ZodUndefined.create; +exports.undefined = undefinedType; +const nullType = ZodNull.create; +exports["null"] = nullType; +const anyType = ZodAny.create; +exports.any = anyType; +const unknownType = ZodUnknown.create; +exports.unknown = unknownType; +const neverType = ZodNever.create; +exports.never = neverType; +const voidType = ZodVoid.create; +exports["void"] = voidType; +const arrayType = ZodArray.create; +exports.array = arrayType; +const objectType = ZodObject.create; +exports.object = objectType; +const strictObjectType = ZodObject.strictCreate; +exports.strictObject = strictObjectType; +const unionType = ZodUnion.create; +exports.union = unionType; +const discriminatedUnionType = ZodDiscriminatedUnion.create; +exports.discriminatedUnion = discriminatedUnionType; +const intersectionType = ZodIntersection.create; +exports.intersection = intersectionType; +const tupleType = ZodTuple.create; +exports.tuple = tupleType; +const recordType = ZodRecord.create; +exports.record = recordType; +const mapType = ZodMap.create; +exports.map = mapType; +const setType = ZodSet.create; +exports.set = setType; +const functionType = ZodFunction.create; +exports["function"] = functionType; +const lazyType = ZodLazy.create; +exports.lazy = lazyType; +const literalType = ZodLiteral.create; +exports.literal = literalType; +const enumType = ZodEnum.create; +exports["enum"] = enumType; +const nativeEnumType = ZodNativeEnum.create; +exports.nativeEnum = nativeEnumType; +const promiseType = ZodPromise.create; +exports.promise = promiseType; +const effectsType = ZodEffects.create; +exports.effect = effectsType; +exports.transformer = effectsType; +const optionalType = ZodOptional.create; +exports.optional = optionalType; +const nullableType = ZodNullable.create; +exports.nullable = nullableType; +const preprocessType = ZodEffects.createWithPreprocess; +exports.preprocess = preprocessType; +const pipelineType = ZodPipeline.create; +exports.pipeline = pipelineType; +const ostring = () => stringType().optional(); +exports.ostring = ostring; +const onumber = () => numberType().optional(); +exports.onumber = onumber; +const oboolean = () => booleanType().optional(); +exports.oboolean = oboolean; +exports.coerce = { + string: ((arg) => ZodString.create({ ...arg, coerce: true })), + number: ((arg) => ZodNumber.create({ ...arg, coerce: true })), + boolean: ((arg) => ZodBoolean.create({ + ...arg, + coerce: true, + })), + bigint: ((arg) => ZodBigInt.create({ ...arg, coerce: true })), + date: ((arg) => ZodDate.create({ ...arg, coerce: true })), +}; +exports.NEVER = parseUtil_1.INVALID; + + +/***/ }), + +/***/ 1619: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getSkipTerraform = exports.main = void 0; +const lib = __importStar(__nccwpck_require__(2487)); +const core = __importStar(__nccwpck_require__(7484)); +const fs = __importStar(__nccwpck_require__(9896)); +const main = () => __awaiter(void 0, void 0, void 0, function* () { + const inputs = { + skipLabelPrefix: core.getInput("skip_label_prefix", { required: true }), + labels: core.getInput("labels", { required: true }), + prAuthor: core.getInput("pr_author", { required: true }), + target: process.env.TFACTION_TARGET, + }; + const config = lib.getConfig(); + // labels is pull request's labels. + const labels = fs.readFileSync(inputs.labels, "utf8").split("\n"); + const isSkip = (0, exports.getSkipTerraform)(inputs, config, labels); + core.exportVariable("TFACTION_SKIP_TERRAFORM", isSkip); + core.setOutput("skip_terraform", isSkip); +}); +exports.main = main; +const getSkipTerraform = (inputs, config, labels) => { + var _a, _b; + // https://suzuki-shunsuke.github.io/tfaction/docs/feature/support-skipping-terraform-renovate-pr + const renovateLogin = (_a = config.renovate_login) !== null && _a !== void 0 ? _a : "renovate[bot]"; + if (!inputs.target) { + throw new Error("TFACTION_TARGET is required"); + } + if (renovateLogin !== inputs.prAuthor) { + // If pull request author isn't Renovate bot + // If the pull request has the skip label of the target, terraform is skipped. + for (let i = 0; i < labels.length; i++) { + if (labels[i] == `${inputs.skipLabelPrefix}${inputs.target}`) { + return true; + } + } + return false; + } + if (!config.skip_terraform_by_renovate) { + return false; + } + // If the pull request has labels of renovate_terraform_labels, terraform is run. 
+ const renovateTerraformLabels = new Set((_b = config.renovate_terraform_labels) !== null && _b !== void 0 ? _b : ["terraform"]); + for (let i = 0; i < labels.length; i++) { + const label = labels[i]; + if (renovateTerraformLabels.has(label)) { + return false; + } + } + return true; +}; +exports.getSkipTerraform = getSkipTerraform; + + +/***/ }), + +/***/ 86: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.main = void 0; +const core = __importStar(__nccwpck_require__(7484)); +const exec = __importStar(__nccwpck_require__(5236)); +const lib = __importStar(__nccwpck_require__(2487)); +const path = __importStar(__nccwpck_require__(6928)); +const fs_1 = __importDefault(__nccwpck_require__(9896)); +const tmp_1 = __importDefault(__nccwpck_require__(1288)); +const glob_1 = __nccwpck_require__(1363); +const main = () => __awaiter(void 0, void 0, void 0, function* () { + var _a; + run({ + workingDir: process.env.TFACTION_WORKING_DIR, + target: process.env.TFACTION_TARGET, + rootDir: (_a = process.env.GITHUB_WORKSPACE) !== null && _a !== void 0 ? 
_a : "", + plan: core.getBooleanInput("plan", { required: true }), + githubToken: core.getInput("github_token", { required: true }), + }, lib.getConfig()); +}); +exports.main = main; +const run = (inputs, config) => __awaiter(void 0, void 0, void 0, function* () { + var _a, _b, _c, _d, _e, _f, _g, _h; + const workingDirectoryFile = (_a = config.working_directory_file) !== null && _a !== void 0 ? _a : "tfaction.yaml"; + const t = yield lib.getTargetGroup(config, inputs.target, inputs.workingDir); + if (!t.group) { + throw new Error("target config is not found in target_groups"); + } + const wdConfig = lib.readTargetConfig(path.join(t.workingDir, workingDirectoryFile)); + const policyMap = new Map(); + const conftest = (_b = config.conftest) !== null && _b !== void 0 ? _b : {}; + for (const policy of (_d = (_c = config.conftest) === null || _c === void 0 ? void 0 : _c.policies) !== null && _d !== void 0 ? _d : []) { + if (policy.id) { + policyMap.set(policy.id, policy); + } + } + if (conftest.policies === undefined) { + conftest.policies = []; + if (config.conftest_policy_directory) { + conftest.policies.push({ + policy: config.conftest_policy_directory, + plan: true, + }); + } + else { + if (fs_1.default.existsSync("policy")) { + conftest.policies.push({ + policy: "policy", + plan: true, + }); + } + } + } + for (const cfg of [t.group, wdConfig]) { + if ((_e = cfg.conftest) === null || _e === void 0 ? void 0 : _e.disable_all) { + for (const [key, value] of policyMap) { + value.enabled = false; + } + for (const policy of conftest.policies) { + policy.enabled = false; + } + } + for (const policy of (_g = (_f = cfg.conftest) === null || _f === void 0 ? void 0 : _f.policies) !== null && _g !== void 0 ? _g : []) { + if (!policy.id) { + conftest.policies.push(policy); + continue; + } + const elem = policyMap.get(policy.id); + if (!elem) { + policyMap.set(policy.id, policy); + continue; + } + Object.assign(elem, policy); + } + } + const policies = []; + for (const policy of conftest.policies.concat(...policyMap.values())) { + if (policy.enabled !== false && inputs.plan === !!policy.plan) { + policies.push(policy); + } + } + if (policies.length !== 0) { + yield exec.exec("github-comment", ["exec", "-var", `tfaction_target:${t.target}`, "--", "conftest", "-v"], { + cwd: t.workingDir, + env: Object.assign(Object.assign({}, process.env), { GITHUB_TOKEN: inputs.githubToken }), + }); + } + for (const policy of policies) { + if (!policy.policy) { + continue; + } + core.info("Running conftest"); + const paths = []; + if (policy.tf) { + const tfFiles = (0, glob_1.globSync)(path.join(t.workingDir, "*.tf"), { + ignore: ".terraform/**", + }); + const tfJSONFiles = (0, glob_1.globSync)(path.join(t.workingDir, "*.tf.json"), { + ignore: ".terraform/**", + }); + for (const tfFile of tfFiles.concat(tfJSONFiles)) { + paths.push(path.relative(t.workingDir, tfFile)); + } + } + else if (policy.plan) { + paths.push("tfplan.json"); + } + else if (policy.paths) { + for (const p of policy.paths) { + const files = (0, glob_1.globSync)(path.join(t.workingDir, p), { + ignore: ".terraform/**", + }); + for (const file of files) { + paths.push(path.relative(t.workingDir, file)); + } + } + } + const args = [ + "exec", + "-var", + `tfaction_target:${t.target}`, + "-k", + "conftest", + "--", + "conftest", + "test", + "--no-color", + ]; + if (typeof policy.policy === "string") { + args.push("-p", path.join(inputs.rootDir, policy.policy)); + } + else { + for (const p of policy.policy) { + args.push("-p", path.join(inputs.rootDir, 
p)); + } + } + if (policy.combine) { + args.push("--combine"); + } + if (policy.data !== undefined) { + if (typeof policy.data === "string") { + args.push("--data", path.join(inputs.rootDir, policy.data)); + } + else { + for (const p of policy.data) { + args.push("--data", path.join(inputs.rootDir, p)); + } + } + } + // create a special data file + tmp_1.default.setGracefulCleanup(); + const tmpobj = tmp_1.default.dirSync(); + const data = { + tfaction: { + target: t.target, + working_directory: t.workingDir, + }, + }; + const tmpFile = path.join(tmpobj.name, "data.json"); + fs_1.default.writeFileSync(tmpFile, JSON.stringify(data)); + args.push("--data", tmpFile); + if (policy.fail_on_warn) { + args.push("--fail-on-warn"); + } + if (policy.no_fail) { + args.push("--no-fail"); + } + if (policy.all_namespaces) { + args.push("--all-namespaces"); + } + if (policy.quiet) { + args.push("--quiet"); + } + if (policy.trace) { + args.push("--trace"); + } + if (policy.strict) { + args.push("--strict"); + } + if (policy.show_builtin_errors) { + args.push("--show-builtin-errors"); + } + if (policy.junit_hide_message) { + args.push("--junit-hide-message"); + } + if (policy.suppress_exceptions) { + args.push("--suppress-exceptions"); + } + if (policy.tls) { + args.push("--tls"); + } + // if (policy.ignore) { + // args.push("--ignore", policy.ignore); + // } + if (policy.parser) { + args.push("--parser", policy.parser); + } + // if (policy.capabilities) { + // args.push("--capabilities", policy.capabilities); + // } + if (policy.output) { + args.push("--output", policy.output); + } + for (const n of (_h = policy.namespaces) !== null && _h !== void 0 ? _h : []) { + args.push("-n", n); + } + // for (const n of policy.proto_file_dirs ?? []) { + // args.push("--proto-file-dirs", n); + // } + args.push(...paths); + core.info("github-comment " + args.join(" ")); + yield exec.exec("github-comment", args, { + cwd: t.workingDir, + env: Object.assign(Object.assign({}, process.env), { GITHUB_TOKEN: inputs.githubToken }), + }); + } +}); + + +/***/ }), + +/***/ 8584: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.main = void 0; +const run_1 = __nccwpck_require__(5517); +const main = () => __awaiter(void 0, void 0, void 0, function* () { + yield (0, run_1.run)(); +}); +exports.main = main; + + +/***/ }), + +/***/ 5517: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.run = void 0; +const core = __importStar(__nccwpck_require__(7484)); +const lib = __importStar(__nccwpck_require__(2487)); +const client_secrets_manager_1 = __nccwpck_require__(2994); +function exportSecret(envName, secretID, secretValue, secretKey) { + if (secretKey) { + core.info(`export the secret as the environment variable: secret_id=${secretID} env_name=${envName} secret_key=${secretKey}`); + } + else { + core.info(`export the secret as the environment variable: secret_id=${secretID} env_name=${envName}`); + } + core.setSecret(secretValue); + core.exportVariable(envName, secretValue); +} +function exportSecrets(client, secrets) { + return __awaiter(this, void 0, void 0, function* () { + for (let i = 0; i < secrets.length; i++) { + const secret = secrets[i]; + if (!secret.secret_id) { + throw new Error("secret_id is required"); + } + const command = new client_secrets_manager_1.GetSecretValueCommand({ + SecretId: secret.secret_id, + }); + const response = yield client.send(command); + if (!response.SecretString) { + throw new Error(`SecretString is empty: secret_id=${secret.secret_id}`); + } + let secretJSON = null; + for (let j = 0; j < secret.envs.length; j++) { + const e = secret.envs[j]; + if (!e.env_name) { + throw new Error(`env_name is required: secret_id=${secret.secret_id}`); + } + if (!e.secret_key) { + exportSecret(e.env_name, secret.secret_id, response.SecretString, ""); + continue; + } + if (!secretJSON) { + secretJSON = JSON.parse(response.SecretString); + } + if (!secretJSON[e.secret_key]) { + throw new Error(`secret key isn't found: secret_key=${e.secret_key} secret_id=${secret.secret_id}`); + } + exportSecret(e.env_name, secret.secret_id, secretJSON[e.secret_key], e.secret_key); + } + } + }); +} +const run = () => __awaiter(void 
0, void 0, void 0, function* () { + const config = lib.getConfig(); + const targetS = lib.getTarget(); + const wd = lib.getWorkingDir(); + const jobType = lib.getJobType(); + const isApply = lib.getIsApply(); + const t = yield lib.getTargetGroup(config, targetS, wd); + const jobConfig = lib.getJobConfig(t.group, isApply, jobType); + let awsClient = null; + if (t.group.aws_secrets_manager) { + awsClient = new client_secrets_manager_1.SecretsManagerClient({ + region: t.group.aws_region, + }); + yield exportSecrets(awsClient, t.group.aws_secrets_manager); + } + if (jobConfig && jobConfig.aws_secrets_manager) { + if (!awsClient) { + awsClient = new client_secrets_manager_1.SecretsManagerClient({ + region: t.group.aws_region, + }); + } + yield exportSecrets(awsClient, jobConfig.aws_secrets_manager); + } +}); +exports.run = run; + + +/***/ }), + +/***/ 9518: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.main = void 0; +const core = __importStar(__nccwpck_require__(7484)); +const lib = __importStar(__nccwpck_require__(2487)); +const yaml = __nccwpck_require__(4281); +function setSecretToMap(secrets, m) { + for (let i = 0; i < secrets.length; i++) { + const secret = secrets[i]; + if (secret.env_name) { + if (secret.secret_name) { + m.set(secret.env_name, secret.secret_name); + } + else { + m.set(secret.env_name, secret.env_name); + } + } + else { + if (secret.secret_name) { + m.set(secret.secret_name, secret.secret_name); + } + else { + throw new Error("either secret_name or env_name is required"); + } + } + } +} +const getSecrets = (targetConfig, jobConfig) => { + const targetSecrets = targetConfig.secrets; + const secrets = new Map(); + if (targetSecrets != undefined) { + setSecretToMap(targetSecrets, secrets); + } + if (jobConfig != undefined && jobConfig.secrets != undefined) { + setSecretToMap(jobConfig.secrets, secrets); + } + return secrets; +}; +const main = () => __awaiter(void 0, void 0, void 0, function* () { + const config = lib.getConfig(); + const secrets = new Map(Object.entries(JSON.parse(core.getInput("secrets")))); + // Log the list of secrets for troubleshooting + // https://github.com/suzuki-shunsuke/tfaction/issues/1564 + core.info(`The list of secret names passed to the action: ${Array.from(secrets.keys()).join(", ")}`); + const targetS = lib.getTarget(); + const workingDir = lib.getWorkingDir(); + const jobType = lib.getJobType(); + const isApply = lib.getIsApply(); + const t = yield lib.getTargetGroup(config, targetS, workingDir); + const jobConfig = lib.getJobConfig(t.group, isApply, jobType); + for (const [envName, secretName] of getSecrets(t.group, jobConfig)) { + if (!secrets.has(secretName)) { + throw new Error(`secret is not found: ${secretName}`); + } + const secretValue = secrets.get(secretName); + core.info(`export the secret ${secretName} as the environment variable ${envName}`); + core.exportVariable(envName, secretValue); + } +}); +exports.main = main; + + +/***/ }), + +/***/ 2829: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.main_ = exports.main = void 0; +const core = __importStar(__nccwpck_require__(7484)); +const lib = __importStar(__nccwpck_require__(2487)); +const main = () => __awaiter(void 0, void 0, void 0, function* () { + const config = lib.getConfig(); + const result = (0, exports.main_)(config, { + repository: process.env.GITHUB_REPOSITORY, + drift_issue_number: process.env.TFACTION_DRIFT_ISSUE_NUMBER, + }); + for (const [key, value] of Object.entries(result.envs)) { + core.exportVariable(key, value); + } + for (const [key, value] of Object.entries(result.outputs)) { + core.setOutput(key, value); + } +}); +exports.main = main; +const main_ = (config, input) => { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1; + if (!config.plan_workflow_name) { + throw new Error('The setting "plan_workflow_name" is required in tfaction-root.yaml'); + } + const outputs = { + base_working_directory: config.base_working_directory || ".", + working_directory_file: config.working_directory_file || "tfaction.yaml", + module_base_directory: config.module_base_directory || ".", + module_file: config.module_file || "tfaction_module.yaml", + renovate_login: config.renovate_login || "renovate[bot]", + draft_pr: !!config.draft_pr, + skip_create_pr: !!config.skip_create_pr, + plan_workflow_name: config.plan_workflow_name, + label_prefix_target: ((_a = config === null || config === void 0 ? void 0 : config.label_prefixes) === null || _a === void 0 ? void 0 : _a.target) || "target:", + label_prefix_tfmigrate: ((_b = config === null || config === void 0 ? void 0 : config.label_prefixes) === null || _b === void 0 ? void 0 : _b.tfmigrate) || "tfmigrate:", + label_prefix_skip: ((_c = config === null || config === void 0 ? void 0 : config.label_prefixes) === null || _c === void 0 ? void 0 : _c.skip) || "skip:", + disable_update_related_pull_requests: !((_e = (_d = config === null || config === void 0 ? void 0 : config.update_related_pull_requests) === null || _d === void 0 ? void 0 : _d.enabled) !== null && _e !== void 0 ? 
_e : true), + update_local_path_module_caller: (_g = (_f = config === null || config === void 0 ? void 0 : config.update_local_path_module_caller) === null || _f === void 0 ? void 0 : _f.enabled) !== null && _g !== void 0 ? _g : false, + aqua_update_checksum_enabled: (_k = (_j = (_h = config === null || config === void 0 ? void 0 : config.aqua) === null || _h === void 0 ? void 0 : _h.update_checksum) === null || _j === void 0 ? void 0 : _j.enabled) !== null && _k !== void 0 ? _k : false, + aqua_update_checksum_prune: (_o = (_m = (_l = config === null || config === void 0 ? void 0 : config.aqua) === null || _l === void 0 ? void 0 : _l.update_checksum) === null || _m === void 0 ? void 0 : _m.prune) !== null && _o !== void 0 ? _o : false, + aqua_update_checksum_skip_push: input.drift_issue_number + ? true + : ((_r = (_q = (_p = config === null || config === void 0 ? void 0 : config.aqua) === null || _p === void 0 ? void 0 : _p.update_checksum) === null || _q === void 0 ? void 0 : _q.skip_push) !== null && _r !== void 0 ? _r : false), + enable_tfsec: (_t = (_s = config === null || config === void 0 ? void 0 : config.tfsec) === null || _s === void 0 ? void 0 : _s.enabled) !== null && _t !== void 0 ? _t : false, + enable_tflint: (_v = (_u = config === null || config === void 0 ? void 0 : config.tflint) === null || _u === void 0 ? void 0 : _u.enabled) !== null && _v !== void 0 ? _v : true, + enable_trivy: (_x = (_w = config === null || config === void 0 ? void 0 : config.trivy) === null || _w === void 0 ? void 0 : _w.enabled) !== null && _x !== void 0 ? _x : true, + tflint_fix: (_z = (_y = config === null || config === void 0 ? void 0 : config.tflint) === null || _y === void 0 ? void 0 : _y.fix) !== null && _z !== void 0 ? _z : false, + terraform_command: (config === null || config === void 0 ? void 0 : config.terraform_command) || "terraform", + drift_issue_repo_owner: "", + drift_issue_repo_name: "", + }; + const envs = { + TFACTION_SKIP_ADDING_AQUA_PACKAGES: (_1 = (_0 = config === null || config === void 0 ? void 0 : config.scaffold_working_directory) === null || _0 === void 0 ? void 0 : _0.skip_adding_aqua_packages) !== null && _1 !== void 0 ? _1 : true, + }; + if (config.drift_detection && config.drift_detection.issue_repo_owner) { + outputs.drift_issue_repo_owner = config.drift_detection.issue_repo_owner; + } + else { + if (input.repository) { + outputs.drift_issue_repo_owner = input.repository.split("/")[0]; + } + } + if (config.drift_detection && config.drift_detection.issue_repo_name) { + outputs.drift_issue_repo_name = config.drift_detection.issue_repo_name; + } + else { + if (input.repository) { + const a = input.repository.split("/"); + if (a.length > 1) { + outputs.drift_issue_repo_name = a[1]; + } + } + } + return { + outputs: outputs, + envs: envs, + }; +}; +exports.main_ = main_; + + +/***/ }), + +/***/ 3287: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.run = exports.main = void 0; +const core = __importStar(__nccwpck_require__(7484)); +const lib = __importStar(__nccwpck_require__(2487)); +const path = __importStar(__nccwpck_require__(6928)); +const main = () => __awaiter(void 0, void 0, void 0, function* () { + const result = yield (0, exports.run)({ + target: process.env.TFACTION_TARGET, + workingDir: process.env.TFACTION_WORKING_DIR, + isApply: lib.getIsApply(), + jobType: lib.getJobType(), + }, lib.getConfig()); + for (const [key, value] of result.envs) { + core.exportVariable(key, value); + } + for (const [key, value] of result.outputs) { + core.setOutput(key, value); + } +}); +exports.main = main; +const run = (inputs, config) => __awaiter(void 0, void 0, void 0, function* () { + var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; + const workingDirectoryFile = (_a = config.working_directory_file) !== null && _a !== void 0 ? _a : "tfaction.yaml"; + const envs = new Map(); + const outputs = new Map(); + const t = yield lib.getTargetGroup(config, inputs.target, inputs.workingDir); + const workingDir = t.workingDir; + const target = t.target; + const targetConfig = t.group; + envs.set("TFACTION_WORKING_DIR", workingDir); + envs.set("TFACTION_TARGET", target); + outputs.set("working_directory", workingDir); + outputs.set("providers_lock_opts", "-platform=windows_amd64 -platform=linux_amd64 -platform=darwin_amd64"); + for (const [key, value] of lib.setOutputs(["template_dir"], [targetConfig])) { + outputs.set(key, value); + } + outputs.set("enable_tfsec", (_c = (_b = config === null || config === void 0 ? void 0 : config.tfsec) === null || _b === void 0 ? void 0 : _b.enabled) !== null && _c !== void 0 ? _c : false); + outputs.set("enable_tflint", (_e = (_d = config === null || config === void 0 ? void 0 : config.tflint) === null || _d === void 0 ? void 0 : _d.enabled) !== null && _e !== void 0 ? _e : true); + outputs.set("enable_trivy", (_g = (_f = config === null || config === void 0 ? void 0 : config.trivy) === null || _f === void 0 ? void 0 : _f.enabled) !== null && _g !== void 0 ? 
_g : true); + outputs.set("tflint_fix", (_j = (_h = config === null || config === void 0 ? void 0 : config.tflint) === null || _h === void 0 ? void 0 : _h.fix) !== null && _j !== void 0 ? _j : false); + outputs.set("terraform_command", "terraform"); + if (inputs.jobType === "scaffold_working_dir") { + const m = lib.setOutputs([ + "s3_bucket_name_tfmigrate_history", + "gcs_bucket_name_tfmigrate_history", + "aws_region", + "aws_assume_role_arn", + "gcp_service_account", + "gcp_workload_identity_provider", + "gcp_access_token_scopes", + "gcp_remote_backend_service_account", + "gcp_remote_backend_workload_identity_provider", + ], [targetConfig]); + for (const [key, value] of m) { + outputs.set(key, value); + } + } + else { + const rootJobConfig = lib.getJobConfig(targetConfig, inputs.isApply, inputs.jobType); + const wdConfig = lib.readTargetConfig(path.join(workingDir, workingDirectoryFile)); + const jobConfig = lib.getJobConfig(wdConfig, inputs.isApply, inputs.jobType); + const m1 = lib.setOutputs([ + "s3_bucket_name_tfmigrate_history", + "gcs_bucket_name_tfmigrate_history", + "providers_lock_opts", + "terraform_command", + ], [wdConfig, targetConfig, config]); + for (const [key, value] of m1) { + outputs.set(key, value); + } + const m2 = lib.setOutputs([ + "aws_region", + "aws_assume_role_arn", + "aws_role_session_name", + "gcp_service_account", + "gcp_workload_identity_provider", + "gcp_access_token_scopes", + "gcp_remote_backend_service_account", + "gcp_remote_backend_workload_identity_provider", + ], [jobConfig, wdConfig, rootJobConfig, targetConfig, config]); + for (const [key, value] of m2) { + outputs.set(key, value); + } + if (!outputs.has("aws_role_session_name")) { + const prefix = `tfaction-${inputs.isApply ? "apply" : "plan"}`; + const normalizedTarget = target.replaceAll("/", "_"); + const runID = (_k = process.env.GITHUB_RUN_ID) !== null && _k !== void 0 ? _k : ""; + const names = [ + `${prefix}-${normalizedTarget}-${runID}`, + `${prefix}-${normalizedTarget}`, + `${prefix}-${runID}`, + `${prefix}`, + ]; + for (const name of names) { + if (name.length > 64) { + continue; + } + outputs.set("aws_role_session_name", name); + break; + } + } + outputs.set("destroy", wdConfig.destroy ? true : false); + outputs.set("enable_terraform_docs", (_p = (_m = (_l = wdConfig === null || wdConfig === void 0 ? void 0 : wdConfig.terraform_docs) === null || _l === void 0 ? void 0 : _l.enabled) !== null && _m !== void 0 ? _m : (_o = config === null || config === void 0 ? void 0 : config.terraform_docs) === null || _o === void 0 ? void 0 : _o.enabled) !== null && _p !== void 0 ? _p : false); + const m3 = lib.setEnvs(config, targetConfig, rootJobConfig, wdConfig, jobConfig); + for (const [key, value] of m3) { + envs.set(key, value); + } + } + return { + outputs: outputs, + envs: envs, + }; +}); +exports.run = run; + + +/***/ }), + +/***/ 9407: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(7484)); +const run_1 = __nccwpck_require__(9786); +const testAction = __importStar(__nccwpck_require__(9573)); +try { + if (process.env.TFACTION_TEST_ACTION) { + testAction.main(); + } + else { + (0, run_1.main)({ + action: core.getInput("action"), + }); + } +} +catch (error) { + core.setFailed(error instanceof Error ? error.message : JSON.stringify(error)); +} + + +/***/ }), + +/***/ 2487: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getTargetGroup = exports.setEnvs = exports.setOutputs = exports.setValues = exports.getJobConfig = exports.readTargetConfig = exports.getTargetFromTargetGroupsByWorkingDir = exports.getIsApply = exports.getWorkingDir = exports.getTarget = exports.createWDTargetMap = exports.getConfig = exports.getJobType = void 0; +const fs = __importStar(__nccwpck_require__(9896)); +const path = __importStar(__nccwpck_require__(6928)); +const core = __importStar(__nccwpck_require__(7484)); +const exec = __importStar(__nccwpck_require__(5236)); +const js_yaml_1 = __nccwpck_require__(4281); +const zod_1 = __nccwpck_require__(4809); +const GitHubEnvironment = zod_1.z.union([ + zod_1.z.string(), + zod_1.z.object({ + name: zod_1.z.string(), + url: zod_1.z.string(), + }), +]); +const JobType = zod_1.z.union([ + zod_1.z.literal("terraform"), + zod_1.z.literal("tfmigrate"), + zod_1.z.literal("scaffold_working_dir"), +]); +const getJobType = () => { + if (process.env.TFACTION_JOB_TYPE === undefined) { + throw new Error("environment variable TFACTION_JOB_TYPE is required"); + } + return JobType.parse(process.env.TFACTION_JOB_TYPE); +}; +exports.getJobType = getJobType; +const TfsecConfig = zod_1.z.object({ + enabled: zod_1.z.optional(zod_1.z.boolean()), +}); +const TflintConfig = zod_1.z.object({ + enabled: zod_1.z.optional(zod_1.z.boolean()), + fix: zod_1.z.optional(zod_1.z.boolean()), +}); +const TrivyConfig = zod_1.z.object({ + enabled: zod_1.z.optional(zod_1.z.boolean()), +}); +const TerraformDocsConfig = zod_1.z.object({ + enabled: zod_1.z.optional(zod_1.z.boolean()), +}); +const ConftestPolicyConfig = zod_1.z.object({ + tf: zod_1.z.optional(zod_1.z.boolean()), + plan: zod_1.z.optional(zod_1.z.boolean()), + id: zod_1.z.optional(zod_1.z.string()), + enabled: zod_1.z.optional(zod_1.z.boolean()), + policy: zod_1.z.union([zod_1.z.string(), zod_1.z.array(zod_1.z.string())]), + data: zod_1.z.optional(zod_1.z.union([zod_1.z.string(), zod_1.z.array(zod_1.z.string())])), + fail_on_warn: zod_1.z.optional(zod_1.z.boolean()), + no_fail: zod_1.z.optional(zod_1.z.boolean()), + all_namespaces: zod_1.z.optional(zod_1.z.boolean()), + quiet: zod_1.z.optional(zod_1.z.boolean()), + trace: zod_1.z.optional(zod_1.z.boolean()), + strict: zod_1.z.optional(zod_1.z.boolean()), + show_builtin_errors: zod_1.z.optional(zod_1.z.boolean()), + junit_hide_message: zod_1.z.optional(zod_1.z.boolean()), + suppress_exceptions: zod_1.z.optional(zod_1.z.boolean()), + combine: zod_1.z.optional(zod_1.z.boolean()), + tls: zod_1.z.optional(zod_1.z.boolean()), + ignore: zod_1.z.optional(zod_1.z.string()), + parser: zod_1.z.optional(zod_1.z.string()), + capabilities: zod_1.z.optional(zod_1.z.string()), + output: zod_1.z.optional(zod_1.z.string()), + namespaces: zod_1.z.optional(zod_1.z.array(zod_1.z.string())), + proto_file_dirs: zod_1.z.optional(zod_1.z.array(zod_1.z.string())), + paths: zod_1.z.optional(zod_1.z.array(zod_1.z.string())), +}); +const ConftestConfig = zod_1.z.object({ + disable_all: zod_1.z.optional(zod_1.z.boolean()), + policies: zod_1.z.optional(zod_1.z.array(ConftestPolicyConfig)), +}); +const GitHubSecrets = zod_1.z.array(zod_1.z.object({ + env_name: zod_1.z.string(), + secret_name: zod_1.z.string(), +})); +const AWSSecretsManagerSecretEnv = zod_1.z.object({ + env_name: zod_1.z.string(), + secret_key: 
zod_1.z.optional(zod_1.z.string()), +}); +const AWSSecretsManagerSecret = zod_1.z.object({ + envs: zod_1.z.array(AWSSecretsManagerSecretEnv), + secret_id: zod_1.z.string(), + version_id: zod_1.z.optional(zod_1.z.string()), + version_stage: zod_1.z.optional(zod_1.z.string()), + aws_region: zod_1.z.optional(zod_1.z.string()), +}); +const JobConfig = zod_1.z.object({ + aws_assume_role_arn: zod_1.z.optional(zod_1.z.string()), + aws_role_session_name: zod_1.z.optional(zod_1.z.string()), + gcp_service_account: zod_1.z.optional(zod_1.z.string()), + gcp_workload_identity_provider: zod_1.z.optional(zod_1.z.string()), + gcp_access_token_scopes: zod_1.z.optional(zod_1.z.string()), + gcp_remote_backend_service_account: zod_1.z.optional(zod_1.z.string()), + gcp_remote_backend_workload_identity_provider: zod_1.z.optional(zod_1.z.string()), + environment: zod_1.z.optional(GitHubEnvironment), + secrets: zod_1.z.optional(GitHubSecrets), + runs_on: zod_1.z.optional(zod_1.z.union([zod_1.z.string(), zod_1.z.array(zod_1.z.string())])), + env: zod_1.z.optional(zod_1.z.record(zod_1.z.string())), + aws_secrets_manager: zod_1.z.optional(zod_1.z.array(AWSSecretsManagerSecret)), +}); +const TargetGroup = zod_1.z.object({ + aws_region: zod_1.z.optional(zod_1.z.string()), + aws_assume_role_arn: zod_1.z.optional(zod_1.z.string()), + aws_role_session_name: zod_1.z.optional(zod_1.z.string()), + destroy: zod_1.z.optional(zod_1.z.boolean()), + env: zod_1.z.optional(zod_1.z.record(zod_1.z.string())), + environment: zod_1.z.optional(GitHubEnvironment), + gcp_service_account: zod_1.z.optional(zod_1.z.string()), + gcp_workload_identity_provider: zod_1.z.optional(zod_1.z.string()), + gcp_remote_backend_service_account: zod_1.z.optional(zod_1.z.string()), + gcp_remote_backend_workload_identity_provider: zod_1.z.optional(zod_1.z.string()), + gcs_bucket_name_tfmigrate_history: zod_1.z.optional(zod_1.z.string()), + runs_on: zod_1.z.optional(zod_1.z.union([zod_1.z.string(), zod_1.z.array(zod_1.z.string())])), + secrets: zod_1.z.optional(GitHubSecrets), + s3_bucket_name_tfmigrate_history: zod_1.z.optional(zod_1.z.string()), + target: zod_1.z.optional(zod_1.z.string()), + template_dir: zod_1.z.optional(zod_1.z.string()), + terraform_apply_config: zod_1.z.optional(JobConfig), + terraform_plan_config: zod_1.z.optional(JobConfig), + tfmigrate_apply_config: zod_1.z.optional(JobConfig), + tfmigrate_plan_config: zod_1.z.optional(JobConfig), + working_directory: zod_1.z.string(), + aws_secrets_manager: zod_1.z.optional(zod_1.z.array(AWSSecretsManagerSecret)), + terraform_command: zod_1.z.optional(zod_1.z.string()), + conftest: zod_1.z.optional(ConftestConfig), +}); +const TargetConfig = zod_1.z.object({ + aws_assume_role_arn: zod_1.z.optional(zod_1.z.string()), + aws_region: zod_1.z.optional(zod_1.z.string()), + destroy: zod_1.z.optional(zod_1.z.boolean()), + drift_detection: zod_1.z.optional(zod_1.z.object({ + enabled: zod_1.z.optional(zod_1.z.boolean()), + })), + env: zod_1.z.optional(zod_1.z.record(zod_1.z.string())), + gcs_bucket_name_tfmigrate_history: zod_1.z.optional(zod_1.z.string()), + gcp_service_account: zod_1.z.optional(zod_1.z.string()), + gcp_workload_identity_provider: zod_1.z.optional(zod_1.z.string()), + providers_lock_opts: zod_1.z.optional(zod_1.z.string()), + s3_bucket_name_tfmigrate_history: zod_1.z.optional(zod_1.z.string()), + secrets: zod_1.z.optional(GitHubSecrets), + terraform_apply_config: zod_1.z.optional(JobConfig), + terraform_plan_config: zod_1.z.optional(JobConfig), + tfmigrate_apply_config: 
zod_1.z.optional(JobConfig), + tfmigrate_plan_config: zod_1.z.optional(JobConfig), + terraform_command: zod_1.z.optional(zod_1.z.string()), + terraform_docs: zod_1.z.optional(TerraformDocsConfig), + conftest: zod_1.z.optional(ConftestConfig), +}); +const Replace = zod_1.z.object({ + patterns: zod_1.z.array(zod_1.z.object({ + regexp: zod_1.z.string(), + replace: zod_1.z.string(), + flags: zod_1.z.optional(zod_1.z.string()), + })), +}); +const Config = zod_1.z.object({ + aqua: zod_1.z.optional(zod_1.z.object({ + update_checksum: zod_1.z.optional(zod_1.z.object({ + enabled: zod_1.z.optional(zod_1.z.boolean()), + skip_push: zod_1.z.optional(zod_1.z.boolean()), + prune: zod_1.z.optional(zod_1.z.boolean()), + })), + })), + base_working_directory: zod_1.z.optional(zod_1.z.string()), + conftest_policy_directory: zod_1.z.optional(zod_1.z.string()), + conftest: zod_1.z.optional(ConftestConfig), + draft_pr: zod_1.z.optional(zod_1.z.boolean()), + drift_detection: zod_1.z.optional(zod_1.z.object({ + issue_repo_owner: zod_1.z.optional(zod_1.z.string()), + issue_repo_name: zod_1.z.optional(zod_1.z.string()), + num_of_issues: zod_1.z.optional(zod_1.z.number()), + minimum_detection_interval: zod_1.z.optional(zod_1.z.number()), + })), + env: zod_1.z.optional(zod_1.z.record(zod_1.z.string())), + label_prefixes: zod_1.z.optional(zod_1.z.object({ + target: zod_1.z.optional(zod_1.z.string()), + tfmigrate: zod_1.z.optional(zod_1.z.string()), + skip: zod_1.z.optional(zod_1.z.string()), + })), + module_base_directory: zod_1.z.optional(zod_1.z.string()), + module_file: zod_1.z.optional(zod_1.z.string()), + plan_workflow_name: zod_1.z.string(), + renovate_login: zod_1.z.optional(zod_1.z.string()), + renovate_terraform_labels: zod_1.z.optional(zod_1.z.array(zod_1.z.string())), + scaffold_working_directory: zod_1.z.optional(zod_1.z.object({ + skip_adding_aqua_packages: zod_1.z.optional(zod_1.z.boolean()), + })), + skip_create_pr: zod_1.z.optional(zod_1.z.boolean()), + skip_terraform_by_renovate: zod_1.z.optional(zod_1.z.boolean()), + target_groups: zod_1.z.array(TargetGroup), + tflint: zod_1.z.optional(TflintConfig), + tfsec: zod_1.z.optional(TfsecConfig), + trivy: zod_1.z.optional(TrivyConfig), + terraform_docs: zod_1.z.optional(TerraformDocsConfig), + update_local_path_module_caller: zod_1.z.optional(zod_1.z.object({ + enabled: zod_1.z.optional(zod_1.z.boolean()), + })), + terraform_command: zod_1.z.optional(zod_1.z.string()), + update_related_pull_requests: zod_1.z.optional(zod_1.z.object({ + enabled: zod_1.z.optional(zod_1.z.boolean()), + })), + working_directory_file: zod_1.z.optional(zod_1.z.string()), + replace: zod_1.z.optional(Replace), +}); +const getConfig = () => { + let configFilePath = process.env.TFACTION_CONFIG; + if (!configFilePath) { + configFilePath = "tfaction-root.yaml"; + } + return Config.parse((0, js_yaml_1.load)(fs.readFileSync(configFilePath, "utf8"))); +}; +exports.getConfig = getConfig; +const createWDTargetMap = (wds, config) => { + var _a, _b; + const m = new Map(); + for (const wd of wds) { + let target = wd; + for (const tg of config.target_groups) { + if (!wd.startsWith(tg.working_directory)) { + continue; + } + if (tg.target !== undefined) { + target = tg.target + wd.slice(tg.working_directory.length); + } + for (const pattern of (_b = (_a = config.replace) === null || _a === void 0 ? void 0 : _a.patterns) !== null && _b !== void 0 ? 
_b : []) { + target = target.replace(new RegExp(pattern.regexp, pattern.flags), pattern.replace); + } + break; + } + m.set(wd, target); + } + return m; +}; +exports.createWDTargetMap = createWDTargetMap; +const getTarget = () => { + return process.env.TFACTION_TARGET; +}; +exports.getTarget = getTarget; +const getWorkingDir = () => { + return process.env.TFACTION_WORKING_DIR; +}; +exports.getWorkingDir = getWorkingDir; +const getIsApply = () => { + return process.env.TFACTION_IS_APPLY === "true"; +}; +exports.getIsApply = getIsApply; +const getTargetFromTargetGroupsByWorkingDir = (targetGroups, wd) => { + for (const targetConfig of targetGroups) { + if (wd.startsWith(targetConfig.working_directory)) { + return targetConfig; + } + } + return undefined; +}; +exports.getTargetFromTargetGroupsByWorkingDir = getTargetFromTargetGroupsByWorkingDir; +const readTargetConfig = (p) => { + return TargetConfig.parse((0, js_yaml_1.load)(fs.readFileSync(p, "utf8"))); +}; +exports.readTargetConfig = readTargetConfig; +const getJobConfig = (config, isApply, jobType) => { + if (config == undefined) { + return undefined; + } + if (isApply) { + switch (jobType) { + case "terraform": + return config.terraform_apply_config; + case "tfmigrate": + return config.tfmigrate_apply_config; + } + } + switch (jobType) { + case "terraform": + return config.terraform_plan_config; + case "tfmigrate": + return config.tfmigrate_plan_config; + } +}; +exports.getJobConfig = getJobConfig; +const setValues = (name, values) => { + for (const value of values) { + if (value != undefined) { + core.setOutput(name, value); + return; + } + } +}; +exports.setValues = setValues; +const setOutputs = (keys, objs) => { + const outputs = new Map(); + for (const key of keys) { + for (const obj of objs) { + if (obj != undefined && obj != null && obj[key] != undefined) { + outputs.set(key, obj[key]); + break; + } + } + } + return outputs; +}; +exports.setOutputs = setOutputs; +const setEnvs = (...objs) => { + const envs = new Map(); + for (const obj of objs) { + if (obj === null || obj === void 0 ? void 0 : obj.env) { + for (const [key, value] of Object.entries(obj.env)) { + envs.set(key, value); + } + } + } + return envs; +}; +exports.setEnvs = setEnvs; +const getTargetGroup = (config, target, workingDir) => __awaiter(void 0, void 0, void 0, function* () { + var _a, _b, _c; + if (workingDir) { + const targetConfig = (0, exports.getTargetFromTargetGroupsByWorkingDir)(config.target_groups, workingDir); + if (!targetConfig) { + throw new Error("target config is not found in target_groups"); + } + target = workingDir; + for (const pattern of (_b = (_a = config.replace) === null || _a === void 0 ? void 0 : _a.patterns) !== null && _b !== void 0 ? _b : []) { + target = target.replace(new RegExp(pattern.regexp), pattern.replace); + } + if (targetConfig.target !== undefined) { + target = workingDir.replace(targetConfig.working_directory, targetConfig.target); + } + return { + target: target, + workingDir: workingDir, + group: targetConfig, + }; + } + if (target === undefined) { + throw new Error("Either TFACTION_TARGET or TFACTION_WORKING_DIR is required"); + } + const out = yield exec.getExecOutput("git", ["ls-files"], { + silent: true, + }); + const wds = []; + for (const line of out.stdout.split("\n")) { + if (line.endsWith((_c = config.working_directory_file) !== null && _c !== void 0 ? 
_c : "tfaction.yaml")) { + wds.push(path.dirname(line)); + } + } + const m = (0, exports.createWDTargetMap)(wds, config); + for (const [wd, t] of m) { + if (t === target) { + workingDir = wd; + break; + } + } + if (workingDir === undefined) { + throw new Error(`No working directory is found for the target ${target}`); + } + const targetConfig = (0, exports.getTargetFromTargetGroupsByWorkingDir)(config.target_groups, workingDir); + if (!targetConfig) { + throw new Error("target config is not found in target_groups"); + } + return { + target: target, + workingDir: workingDir, + group: targetConfig, + }; +}); +exports.getTargetGroup = getTargetGroup; + + +/***/ }), + +/***/ 7289: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.run = exports.main = void 0; +const core = __importStar(__nccwpck_require__(7484)); +const fs = __importStar(__nccwpck_require__(9896)); +const path = __importStar(__nccwpck_require__(6928)); +const main = () => __awaiter(void 0, void 0, void 0, function* () { + const changedFiles = fs + .readFileSync(core.getInput("changed_files", { required: true }), "utf8") + .split("\n"); + const configFiles = fs + .readFileSync(core.getInput("config_files", { required: true }), "utf8") + .split("\n"); + const modules = (0, exports.run)(configFiles, changedFiles); + core.info(`modules: ${Array.from(modules)}`); + core.setOutput("modules", Array.from(modules)); +}); +exports.main = main; +const run = (configFiles, changedFiles) => { + const workingDirs = new Set(); + for (const configFile of configFiles) { + if (configFile === "") { + continue; + } + workingDirs.add(path.dirname(configFile)); + } + const modules = new Set(); + for (const changedFile of changedFiles) { + if (changedFile === "") { + continue; + } + for (const workingDir of workingDirs) { + if (changedFile.startsWith(workingDir + "/")) { + modules.add(workingDir); + } + } + } + return Array.from(modules); +}; +exports.run = run; + + +/***/ }), + +/***/ 2042: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.main = void 0; +const core = __importStar(__nccwpck_require__(7484)); +const fs = __importStar(__nccwpck_require__(9896)); +const path = __importStar(__nccwpck_require__(6928)); +const child_process = __importStar(__nccwpck_require__(5317)); +const lib_1 = __nccwpck_require__(9621); +const main = () => __awaiter(void 0, void 0, void 0, function* () { + const configFiles = fs + .readFileSync(core.getInput("config_files"), "utf8") + .split("\n"); + const moduleFiles = fs + .readFileSync(core.getInput("module_files"), "utf8") + .split("\n"); + // directory where uses modules => modules which are used + const rawModuleCalls = {}; + const allTerraformFiles = Array.from([...configFiles, ...moduleFiles]); + allTerraformFiles.forEach((tfFile) => { + if (tfFile == "") { + return; + } + const tfDir = path.dirname(tfFile); + const inspection = JSON.parse(child_process + .execSync(`terraform-config-inspect --json ${tfDir}`) + .toString("utf-8")); + // List keys of Local Path modules (source starts with ./ or ../) in module_calls + rawModuleCalls[tfDir] = Object.values(inspection["module_calls"]).flatMap((module) => { + const source = module.source; + if (source.startsWith("./") || source.startsWith("../")) { + return [source]; + } + else { + return []; + } + }); + }); + const moduleCallers = (0, lib_1.buildModuleToCallers)((0, lib_1.resolveRelativeCallTree)(rawModuleCalls)); + const json = JSON.stringify(moduleCallers); + core.info(`file: ${json}`); + core.setOutput("file", json); +}); +exports.main = main; + + +/***/ }), + +/***/ 9621: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.resolveRelativeCallTree = resolveRelativeCallTree; +exports.buildModuleToCallers = buildModuleToCallers; +const node_path_1 = __importDefault(__nccwpck_require__(6760)); +function resolveRelativeCallTree(rawModuleCalls) { + const moduleCalls = {}; + for (const [module, thisChildren] of Object.entries(rawModuleCalls)) { + const absModulePath = node_path_1.default.resolve("/", module); + const resolvedChildModules = []; + for (const child of thisChildren) { + const absChildPath = node_path_1.default.resolve(absModulePath, child); + const resolved = node_path_1.default.relative("/", absChildPath); + resolvedChildModules.push(resolved); + } + moduleCalls[module] = resolvedChildModules; + } + return moduleCalls; +} +function buildModuleToCallers(modulesCalls) { + function findCallers(module) { + const callers = []; + for (const [directCaller, modules] of Object.entries(modulesCalls)) { + if (modules.includes(module)) { + const parentCallers = findCallers(directCaller); + callers.push(directCaller, ...parentCallers); + } + } + return callers; + } + const moduleToCallers = {}; + const modules = [...new Set(Object.values(modulesCalls).flat())]; + for (const module of modules) { + if (!moduleToCallers[module]) { + moduleToCallers[module] = []; + } + moduleToCallers[module].push(...findCallers(module)); + } + return moduleToCallers; +} + + +/***/ }), + +/***/ 9117: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.main = exports.run = void 0; +const core = __importStar(__nccwpck_require__(7484)); +const github = __importStar(__nccwpck_require__(3228)); +const fs = __importStar(__nccwpck_require__(9896)); +const path = __importStar(__nccwpck_require__(6928)); +const lib = __importStar(__nccwpck_require__(2487)); +const getTargetConfigByTarget = (targets, wd, target, isApply, jobType) => { + var _a, _b, _c, _d, _e; + const tg = lib.getTargetFromTargetGroupsByWorkingDir(targets, wd); + if (tg === undefined) { + core.warning(`No target group is found for the working directory ${wd}`); + return undefined; + } + const jobConfig = lib.getJobConfig(tg, isApply, jobType); + if (jobConfig === undefined) { + return { + target: target, + working_directory: wd, + runs_on: (_a = tg.runs_on) !== null && _a !== void 0 ? _a : "ubuntu-latest", + environment: tg === null || tg === void 0 ? void 0 : tg.environment, + secrets: tg.secrets, + job_type: jobType, + }; + } + return { + target: target, + working_directory: wd, + runs_on: (_c = (_b = jobConfig.runs_on) !== null && _b !== void 0 ? _b : tg.runs_on) !== null && _c !== void 0 ? _c : "ubuntu-latest", + environment: (_d = jobConfig.environment) !== null && _d !== void 0 ? _d : tg.environment, + secrets: (_e = jobConfig.secrets) !== null && _e !== void 0 ? _e : tg.secrets, + job_type: jobType, + }; +}; +const getPRBody = (prStr, payload) => { + if (payload.pull_request) { + return payload.pull_request.body || ""; + } + if (!prStr) { + return ""; + } + const pr = JSON.parse(prStr); + return (pr === null || pr === void 0 ? void 0 : pr.body) || ""; +}; +const run = (input) => { + var _a; + const config = input.config; + const isApply = input.isApply; + const workingDirs = listWD(input.configFiles); + const wdTargetMap = lib.createWDTargetMap(workingDirs, config); + const targetWDMap = new Map(); + for (const [wd, t] of wdTargetMap) { + targetWDMap.set(t, wd); + } + const terraformTargets = new Set(); + const tfmigrates = new Set(); + const terraformTargetObjs = new Array(); + const tfmigrateObjs = new Array(); + handleLabels(input.labels, isApply, terraformTargets, targetWDMap, config, terraformTargetObjs, tfmigrateObjs, tfmigrates); + const moduleCallerMap = new Map(Object.entries(input.moduleCallers)); + const modules = [...moduleCallerMap.keys()]; + modules.sort(); + modules.reverse(); + const moduleDirs = input.moduleFiles.map((moduleFile) => { + return path.dirname(moduleFile); + }); + moduleDirs.sort(); + moduleDirs.reverse(); + const moduleSet = new Set(moduleDirs); + const changedWorkingDirs = new Set(); + const changedModules = new Set(); + for (const changedFile of input.changedFiles) { + if (changedFile == "") { + continue; + } + for (const module of modules) { + if (changedFile.startsWith(module + "/")) { + (_a = moduleCallerMap.get(module)) === null || _a === void 0 ? 
void 0 : _a.forEach((caller) => { + if (wdTargetMap.has(caller)) { + changedWorkingDirs.add(caller); + } + if (moduleSet.has(caller)) { + changedModules.add(caller); + } + }); + break; + } + } + for (const workingDir of workingDirs) { + if (changedFile.startsWith(workingDir + "/")) { + changedWorkingDirs.add(workingDir); + break; + } + } + for (const module of moduleDirs) { + if (changedFile.startsWith(module + "/")) { + changedModules.add(module); + break; + } + } + } + for (const changedWorkingDir of changedWorkingDirs) { + const target = wdTargetMap.get(changedWorkingDir); + if (target === undefined) { + core.warning(`No target is found for the working directory ${changedWorkingDir}`); + continue; + } + if (!terraformTargets.has(target) && !tfmigrates.has(target)) { + const obj = getTargetConfigByTarget(config.target_groups, changedWorkingDir, target, isApply, "terraform"); + if (obj !== undefined) { + terraformTargets.add(target); + terraformTargetObjs.push(obj); + } + } + } + const followupTarget = getFollowupTarget(input); + if (followupTarget && + !tfmigrates.has(followupTarget) && + !terraformTargets.has(followupTarget)) { + const wd = targetWDMap.get(followupTarget); + if (wd === undefined) { + throw new Error(`No working directory is found for the target ${followupTarget}`); + } + const obj = getTargetConfigByTarget(config.target_groups, wd, followupTarget, isApply, "terraform"); + if (obj !== undefined) { + terraformTargets.add(followupTarget); + terraformTargetObjs.push(obj); + } + } + return { + targetConfigs: terraformTargetObjs.concat(tfmigrateObjs), + modules: Array.from(changedModules), + }; +}; +exports.run = run; +const listWD = (configFiles) => { + const workingDirs = new Array(); + for (const configFile of configFiles) { + if (configFile == "") { + continue; + } + workingDirs.push(path.dirname(configFile)); + } + workingDirs.sort(); + workingDirs.reverse(); + return workingDirs; +}; +const getFollowupTarget = (input) => { + // Expected followupPRBody include the line: + // + const followupTargetCommentRegex = new RegExp(//, "s"); + const prBody = getPRBody(input.pr, input.payload); + const matchResult = prBody.match(followupTargetCommentRegex); + return matchResult ? matchResult[1] : ""; +}; +const handleLabels = (labels, isApply, terraformTargets, targetWDMap, config, terraformTargetObjs, tfmigrateObjs, tfmigrates) => { + var _a, _b, _c; + const skips = new Set(); + const targetPrefix = ((_a = config === null || config === void 0 ? void 0 : config.label_prefixes) === null || _a === void 0 ? void 0 : _a.target) || "target:"; + const skipPrefix = ((_b = config === null || config === void 0 ? void 0 : config.label_prefixes) === null || _b === void 0 ? void 0 : _b.skip) || "skip:"; + const tfmigratePrefix = ((_c = config === null || config === void 0 ? void 0 : config.label_prefixes) === null || _c === void 0 ? 
void 0 : _c.tfmigrate) || "tfmigrate:"; + for (const label of labels) { + if (label == "") { + continue; + } + if (label.startsWith(targetPrefix)) { + const target = label.slice(targetPrefix.length); + if (!terraformTargets.has(target)) { + terraformTargets.add(target); + const wd = targetWDMap.get(target); + if (wd === undefined) { + throw new Error(`No working directory is found for the target ${target}`); + } + const tg = lib.getTargetFromTargetGroupsByWorkingDir(config.target_groups, wd); + const obj = getTargetConfigByTarget(config.target_groups, wd, target, isApply, "terraform"); + if (obj === undefined) { + throw new Error(`No target config is found for the target ${target}`); + } + terraformTargetObjs.push(obj); + } + continue; + } + if (label.startsWith(tfmigratePrefix)) { + const target = label.slice(tfmigratePrefix.length); + if (!tfmigrates.has(target)) { + tfmigrates.add(target); + const wd = targetWDMap.get(target); + if (wd === undefined) { + throw new Error(`No working directory is found for the target ${target}`); + } + const obj = getTargetConfigByTarget(config.target_groups, wd, target, isApply, "tfmigrate"); + if (obj === undefined) { + throw new Error(`No target config is found for the target ${target}`); + } + tfmigrateObjs.push(obj); + } + continue; + } + if (label.startsWith(skipPrefix)) { + skips.add(label.slice(skipPrefix.length)); + continue; + } + } +}; +const main = () => __awaiter(void 0, void 0, void 0, function* () { + // The path to ci-info's pr.json. + const prPath = core.getInput("pull_request"); + const pr = prPath ? fs.readFileSync(prPath, "utf8") : ""; + const result = (0, exports.run)({ + labels: fs.readFileSync(core.getInput("labels"), "utf8").split("\n"), + config: lib.getConfig(), + isApply: lib.getIsApply(), + changedFiles: fs + .readFileSync(core.getInput("changed_files"), "utf8") + .split("\n"), + configFiles: fs + .readFileSync(core.getInput("config_files"), "utf8") + .split("\n"), + moduleFiles: fs + .readFileSync(core.getInput("module_files"), "utf8") + .split("\n"), + pr, + payload: github.context.payload, + /* + { + // caller is a directory where uses the module + module1: [caller1, caller2], + } + */ + moduleCallers: JSON.parse(core.getInput("module_callers") || "{}"), + }); + core.info(`result: ${JSON.stringify(result)}`); + core.setOutput("targets", result.targetConfigs); + core.setOutput("modules", result.modules); +}); +exports.main = main; + + +/***/ }), + +/***/ 9786: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.main = void 0; +const checkTerraformSkip = __importStar(__nccwpck_require__(1619)); +const conftest = __importStar(__nccwpck_require__(86)); +const exportAWSSecretsManager = __importStar(__nccwpck_require__(8584)); +const exportSecrets = __importStar(__nccwpck_require__(9518)); +const getGlobalConfig = __importStar(__nccwpck_require__(2829)); +const getTargetConfig = __importStar(__nccwpck_require__(3287)); +const listChangeModules = __importStar(__nccwpck_require__(7289)); +const listModuleCallers = __importStar(__nccwpck_require__(2042)); +const listTargetsWithChangedFiles = __importStar(__nccwpck_require__(9117)); +const main = (inputs) => __awaiter(void 0, void 0, void 0, function* () { + const actions = new Map([ + ["check-terraform-skip", checkTerraformSkip], + ["conftest", conftest], + ["export-aws-secrets-manager", exportAWSSecretsManager], + ["export-secrets", exportSecrets], + ["get-global-config", getGlobalConfig], + ["get-target-config", getTargetConfig], + ["list-changed-modules", listChangeModules], + ["list-module-callers", listModuleCallers], + ["list-targets-with-changed-files", listTargetsWithChangedFiles], + ]); + const action = actions.get(inputs.action); + if (action === undefined) { + throw new Error(`Unknown action: ${inputs.action}`); + } + action.main(); +}); +exports.main = main; + + +/***/ }), + +/***/ 9573: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.main = void 0; +const json_diff_1 = __nccwpck_require__(2078); +const main = () => __awaiter(void 0, void 0, void 0, function* () { + // Compare outputs + const testdata = [ + { + name: "get-target-config", + expected: { + working_directory: "js/test/aws/foo/dev", + aws_assume_role_arn: "arn:aws:iam::000000000000:role/GitHubActions_Terraform_AWS_terraform_plan", + aws_region: "ap-northeast-1", + // aws_role_session_name: "tfaction-plan-js_test_aws_foo_dev-12425638978", + aws_role_session_name: "", + gcp_service_account: "", + gcp_workload_identity_provider: "", + gcp_access_token_scopes: "", + s3_bucket_name_tfmigrate_history: "0000-0000-0000-suzuki-shunsuke-tfmigrate-history", + gcs_bucket_name_tfmigrate_history: "", + template_dir: "js/test/templates/aws", + providers_lock_opts: "-platform=windows_amd64 -platform=linux_amd64 -platform=darwin_amd64", + enable_tfsec: "false", + enable_tflint: "true", + enable_trivy: "true", + enable_terraform_docs: "false", + destroy: "false", + terraform_command: "terraform", + tflint_fix: "true", + }, + convert: (data) => { + data.aws_role_session_name = ""; + return data; + }, + actual: process.env.GET_TARGET_CONFIG, + }, + { + name: "js/get-target-config", + expected: { + working_directory: "js/test/aws/foo/dev", + providers_lock_opts: "-platform=windows_amd64 -platform=linux_amd64 -platform=darwin_amd64", + template_dir: "js/test/templates/aws", + enable_tfsec: "false", + enable_tflint: "true", + enable_trivy: "true", + tflint_fix: "true", + terraform_command: "terraform", + s3_bucket_name_tfmigrate_history: "0000-0000-0000-suzuki-shunsuke-tfmigrate-history", + aws_region: "ap-northeast-1", + aws_assume_role_arn: "arn:aws:iam::000000000000:role/GitHubActions_Terraform_AWS_terraform_plan", + // aws_role_session_name: "tfaction-plan-js_test_aws_foo_dev-12425638978", + aws_role_session_name: "", + destroy: "false", + enable_terraform_docs: "false", + }, + convert: (data) => { + data.aws_role_session_name = ""; + return data; + }, + actual: process.env.JS_TARGET_CONFIG, + }, + { + name: "get-global-config", + expected: { + base_working_directory: ".", + module_base_directory: ".", + working_directory_file: "tfaction.yaml", + module_file: "tfaction_module.yaml", + renovate_login: "renovate[bot]", + label_prefix_target: "target:", + label_prefix_tfmigrate: "migrate:", + label_prefix_skip: "skip:", + skip_create_pr: "false", + drift_issue_repo_owner: "suzuki-shunsuke", + drift_issue_repo_name: "tfaction", + enable_tfsec: "false", + enable_tflint: "true", + enable_trivy: "true", + tflint_fix: "true", + update_local_path_module_caller: "true", + aqua_update_checksum_enabled: "true", + aqua_update_checksum_skip_push: "false", + aqua_update_checksum_prune: "true", + plan_workflow_name: "test", + terraform_command: "terraform", + }, + actual: process.env.GET_GLOBAL_CONFIG, + }, + { + name: "js/get-global-config", + expected: { + base_working_directory: ".", + working_directory_file: "tfaction.yaml", + module_base_directory: ".", + module_file: "tfaction_module.yaml", + renovate_login: "renovate[bot]", + draft_pr: "false", + skip_create_pr: "false", + plan_workflow_name: "test", + label_prefix_target: "target:", + label_prefix_tfmigrate: "migrate:", + label_prefix_skip: "skip:", + disable_update_related_pull_requests: "false", + 
update_local_path_module_caller: "true", + aqua_update_checksum_enabled: "true", + aqua_update_checksum_prune: "true", + aqua_update_checksum_skip_push: "false", + enable_tfsec: "false", + enable_tflint: "true", + enable_trivy: "true", + tflint_fix: "true", + terraform_command: "terraform", + drift_issue_repo_owner: "suzuki-shunsuke", + drift_issue_repo_name: "tfaction", + }, + actual: process.env.JS_GLOBAL_CONFIG, + }, + { + name: "check-terraform-skip", + expected: { + skip_terraform: "false", + }, + actual: process.env.CHECK_TERRAFORM_SKIP, + }, + { + name: "js/check-terraform-skip", + expected: { + skip_terraform: "false", + }, + actual: process.env.JS_CHECK_TERRAFORM_SKIP, + }, + { + name: "list-changed-modules", + expected: { + modules: '["js/test/modules/foo"]', + }, + actual: process.env.LIST_CHANGED_MODULES, + }, + { + name: "js/list-changed-modules", + expected: { + modules: '["js/test/modules/foo"]', + }, + actual: process.env.JS_LIST_CHANGED_MODULES, + }, + { + name: "list-module-callers", + expected: { + file: '{"js/test/modules/foo":["js/test/aws/foo/dev"]}', + }, + actual: process.env.LIST_MODULE_CALLERS, + }, + { + name: "js/list-module-callers", + expected: { + file: '{"js/test/modules/foo":["js/test/aws/foo/dev"]}', + }, + actual: process.env.JS_LIST_MODULE_CALLERS, + }, + { + name: "list-targets-with-changed-files", + expected: { + targets: "[]", + modules: '["js/test/modules/foo"]', + }, + actual: process.env.LIST_TARGETS_WITH_CHANGED_FILES, + }, + { + name: "js/list-targets-with-changed-files", + expected: { + targets: "[]", + modules: '["js/test/modules/foo"]', + }, + actual: process.env.JS_LIST_TARGETS_WITH_CHANGED_FILES, + }, + ]; + let failed = false; + testdata.forEach((data) => { + let a = JSON.parse(data.actual || "{}"); + if (data.convert) { + a = data.convert(a); + } + const b = (0, json_diff_1.diffString)(data.expected, a); + if (b !== "") { + console.log(`Test failed: ${data.name}`); + console.log(b); + failed = true; + } + }); + if (failed) { + throw new Error("Test failed"); + } +}); +exports.main = main; + + +/***/ }), + +/***/ 2613: +/***/ ((module) => { + +"use strict"; +module.exports = require("assert"); + +/***/ }), + +/***/ 290: +/***/ ((module) => { + +"use strict"; +module.exports = require("async_hooks"); + +/***/ }), + +/***/ 181: +/***/ ((module) => { + +"use strict"; +module.exports = require("buffer"); + +/***/ }), + +/***/ 5317: +/***/ ((module) => { + +"use strict"; +module.exports = require("child_process"); + +/***/ }), + +/***/ 4236: +/***/ ((module) => { + +"use strict"; +module.exports = require("console"); + +/***/ }), + +/***/ 6982: +/***/ ((module) => { + +"use strict"; +module.exports = require("crypto"); + +/***/ }), + +/***/ 1637: +/***/ ((module) => { + +"use strict"; +module.exports = require("diagnostics_channel"); + +/***/ }), + +/***/ 4434: +/***/ ((module) => { + +"use strict"; +module.exports = require("events"); + +/***/ }), + +/***/ 9896: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs"); + +/***/ }), + +/***/ 1943: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs/promises"); + +/***/ }), + +/***/ 8611: +/***/ ((module) => { + +"use strict"; +module.exports = require("http"); + +/***/ }), + +/***/ 5675: +/***/ ((module) => { + +"use strict"; +module.exports = require("http2"); + +/***/ }), + +/***/ 5692: +/***/ ((module) => { + +"use strict"; +module.exports = require("https"); + +/***/ }), + +/***/ 9278: +/***/ ((module) => { + +"use strict"; +module.exports = require("net"); + 
+/***/ }), + +/***/ 8474: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:events"); + +/***/ }), + +/***/ 3024: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:fs"); + +/***/ }), + +/***/ 1455: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:fs/promises"); + +/***/ }), + +/***/ 6760: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:path"); + +/***/ }), + +/***/ 7075: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:stream"); + +/***/ }), + +/***/ 6193: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:string_decoder"); + +/***/ }), + +/***/ 3136: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:url"); + +/***/ }), + +/***/ 7975: +/***/ ((module) => { + +"use strict"; +module.exports = require("node:util"); + +/***/ }), + +/***/ 857: +/***/ ((module) => { + +"use strict"; +module.exports = require("os"); + +/***/ }), + +/***/ 6928: +/***/ ((module) => { + +"use strict"; +module.exports = require("path"); + +/***/ }), + +/***/ 2987: +/***/ ((module) => { + +"use strict"; +module.exports = require("perf_hooks"); + +/***/ }), + +/***/ 932: +/***/ ((module) => { + +"use strict"; +module.exports = require("process"); + +/***/ }), + +/***/ 3480: +/***/ ((module) => { + +"use strict"; +module.exports = require("querystring"); + +/***/ }), + +/***/ 2203: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream"); + +/***/ }), + +/***/ 3774: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream/web"); + +/***/ }), + +/***/ 3193: +/***/ ((module) => { + +"use strict"; +module.exports = require("string_decoder"); + +/***/ }), + +/***/ 3557: +/***/ ((module) => { + +"use strict"; +module.exports = require("timers"); + +/***/ }), + +/***/ 4756: +/***/ ((module) => { + +"use strict"; +module.exports = require("tls"); + +/***/ }), + +/***/ 7016: +/***/ ((module) => { + +"use strict"; +module.exports = require("url"); + +/***/ }), + +/***/ 9023: +/***/ ((module) => { + +"use strict"; +module.exports = require("util"); + +/***/ }), + +/***/ 8253: +/***/ ((module) => { + +"use strict"; +module.exports = require("util/types"); + +/***/ }), + +/***/ 8167: +/***/ ((module) => { + +"use strict"; +module.exports = require("worker_threads"); + +/***/ }), + +/***/ 3106: +/***/ ((module) => { + +"use strict"; +module.exports = require("zlib"); + +/***/ }), + +/***/ 7182: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const WritableStream = (__nccwpck_require__(7075).Writable) +const inherits = (__nccwpck_require__(7975).inherits) + +const StreamSearch = __nccwpck_require__(4136) + +const PartStream = __nccwpck_require__(612) +const HeaderParser = __nccwpck_require__(2271) + +const DASH = 45 +const B_ONEDASH = Buffer.from('-') +const B_CRLF = Buffer.from('\r\n') +const EMPTY_FN = function () {} + +function Dicer (cfg) { + if (!(this instanceof Dicer)) { return new Dicer(cfg) } + WritableStream.call(this, cfg) + + if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } + + if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } + + this._headerFirst = cfg.headerFirst + + this._dashes = 0 + this._parts = 0 + this._finished = false + this._realFinish = false + this._isPreamble = true + this._justMatched = false + this._firstWrite = true + this._inHeader = true + this._part = 
undefined + this._cb = undefined + this._ignoreData = false + this._partOpts = { highWaterMark: cfg.partHwm } + this._pause = false + + const self = this + this._hparser = new HeaderParser(cfg) + this._hparser.on('header', function (header) { + self._inHeader = false + self._part.emit('header', header) + }) +} +inherits(Dicer, WritableStream) + +Dicer.prototype.emit = function (ev) { + if (ev === 'finish' && !this._realFinish) { + if (!this._finished) { + const self = this + process.nextTick(function () { + self.emit('error', new Error('Unexpected end of multipart data')) + if (self._part && !self._ignoreData) { + const type = (self._isPreamble ? 'Preamble' : 'Part') + self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) + self._part.push(null) + process.nextTick(function () { + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + return + } + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + } + } else { WritableStream.prototype.emit.apply(this, arguments) } +} + +Dicer.prototype._write = function (data, encoding, cb) { + // ignore unexpected data (e.g. extra trailer data after finished) + if (!this._hparser && !this._bparser) { return cb() } + + if (this._headerFirst && this._isPreamble) { + if (!this._part) { + this._part = new PartStream(this._partOpts) + if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() } + } + const r = this._hparser.push(data) + if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } + } + + // allows for "easier" testing + if (this._firstWrite) { + this._bparser.push(B_CRLF) + this._firstWrite = false + } + + this._bparser.push(data) + + if (this._pause) { this._cb = cb } else { cb() } +} + +Dicer.prototype.reset = function () { + this._part = undefined + this._bparser = undefined + this._hparser = undefined +} + +Dicer.prototype.setBoundary = function (boundary) { + const self = this + this._bparser = new StreamSearch('\r\n--' + boundary) + this._bparser.on('info', function (isMatch, data, start, end) { + self._oninfo(isMatch, data, start, end) + }) +} + +Dicer.prototype._ignore = function () { + if (this._part && !this._ignoreData) { + this._ignoreData = true + this._part.on('error', EMPTY_FN) + // we must perform some kind of read on the stream even though we are + // ignoring the data, otherwise node's Readable stream will not emit 'end' + // after pushing null to the stream + this._part.resume() + } +} + +Dicer.prototype._oninfo = function (isMatch, data, start, end) { + let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + + if (!this._part && this._justMatched && data) { + while (this._dashes < 2 && (start + i) < end) { + if (data[start + i] === DASH) { + ++i + ++this._dashes + } else { + if (this._dashes) { buf = B_ONEDASH } + this._dashes = 0 + break + } + } + if (this._dashes === 2) { + if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) } + this.reset() + this._finished = true + // no more parts will be added + if (self._parts === 0) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } + } + if (this._dashes) { return } + } + if (this._justMatched) { this._justMatched = false } + if (!this._part) { + this._part = new PartStream(this._partOpts) + this._part._read = function (n) { + self._unpause() + } + if (this._isPreamble && 
this.listenerCount('preamble') !== 0) { + this.emit('preamble', this._part) + } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) { + this.emit('part', this._part) + } else { + this._ignore() + } + if (!this._isPreamble) { this._inHeader = true } + } + if (data && start < end && !this._ignoreData) { + if (this._isPreamble || !this._inHeader) { + if (buf) { shouldWriteMore = this._part.push(buf) } + shouldWriteMore = this._part.push(data.slice(start, end)) + if (!shouldWriteMore) { this._pause = true } + } else if (!this._isPreamble && this._inHeader) { + if (buf) { this._hparser.push(buf) } + r = this._hparser.push(data.slice(start, end)) + if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } + } + } + if (isMatch) { + this._hparser.reset() + if (this._isPreamble) { this._isPreamble = false } else { + if (start !== end) { + ++this._parts + this._part.on('end', function () { + if (--self._parts === 0) { + if (self._finished) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } else { + self._unpause() + } + } + }) + } + } + this._part.push(null) + this._part = undefined + this._ignoreData = false + this._justMatched = true + this._dashes = 0 + } +} + +Dicer.prototype._unpause = function () { + if (!this._pause) { return } + + this._pause = false + if (this._cb) { + const cb = this._cb + this._cb = undefined + cb() + } +} + +module.exports = Dicer + + +/***/ }), + +/***/ 2271: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const EventEmitter = (__nccwpck_require__(8474).EventEmitter) +const inherits = (__nccwpck_require__(7975).inherits) +const getLimit = __nccwpck_require__(2393) + +const StreamSearch = __nccwpck_require__(4136) + +const B_DCRLF = Buffer.from('\r\n\r\n') +const RE_CRLF = /\r\n/g +const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex + +function HeaderParser (cfg) { + EventEmitter.call(this) + + cfg = cfg || {} + const self = this + this.nread = 0 + this.maxed = false + this.npairs = 0 + this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) + this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) + this.buffer = '' + this.header = {} + this.finished = false + this.ss = new StreamSearch(B_DCRLF) + this.ss.on('info', function (isMatch, data, start, end) { + if (data && !self.maxed) { + if (self.nread + end - start >= self.maxHeaderSize) { + end = self.maxHeaderSize - self.nread + start + self.nread = self.maxHeaderSize + self.maxed = true + } else { self.nread += (end - start) } + + self.buffer += data.toString('binary', start, end) + } + if (isMatch) { self._finish() } + }) +} +inherits(HeaderParser, EventEmitter) + +HeaderParser.prototype.push = function (data) { + const r = this.ss.push(data) + if (this.finished) { return r } +} + +HeaderParser.prototype.reset = function () { + this.finished = false + this.buffer = '' + this.header = {} + this.ss.reset() +} + +HeaderParser.prototype._finish = function () { + if (this.buffer) { this._parseHeader() } + this.ss.matches = this.ss.maxMatches + const header = this.header + this.header = {} + this.buffer = '' + this.finished = true + this.nread = this.npairs = 0 + this.maxed = false + this.emit('header', header) +} + +HeaderParser.prototype._parseHeader = function () { + if (this.npairs === this.maxHeaderPairs) { return } + + const lines = this.buffer.split(RE_CRLF) + const len = lines.length + let m, h + + for (var i = 0; i < len; ++i) { // 
eslint-disable-line no-var + if (lines[i].length === 0) { continue } + if (lines[i][0] === '\t' || lines[i][0] === ' ') { + // folded header content + // RFC2822 says to just remove the CRLF and not the whitespace following + // it, so we follow the RFC and include the leading whitespace ... + if (h) { + this.header[h][this.header[h].length - 1] += lines[i] + continue + } + } + + const posColon = lines[i].indexOf(':') + if ( + posColon === -1 || + posColon === 0 + ) { + return + } + m = RE_HDR.exec(lines[i]) + h = m[1].toLowerCase() + this.header[h] = this.header[h] || [] + this.header[h].push((m[2] || '')) + if (++this.npairs === this.maxHeaderPairs) { break } + } +} + +module.exports = HeaderParser + + +/***/ }), + +/***/ 612: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const inherits = (__nccwpck_require__(7975).inherits) +const ReadableStream = (__nccwpck_require__(7075).Readable) + +function PartStream (opts) { + ReadableStream.call(this, opts) +} +inherits(PartStream, ReadableStream) + +PartStream.prototype._read = function (n) {} + +module.exports = PartStream + + +/***/ }), + +/***/ 4136: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/** + * Copyright Brian White. All rights reserved. + * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = (__nccwpck_require__(8474).EventEmitter) +const inherits = (__nccwpck_require__(7975).inherits) + +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) + } + + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } + + const needleLength = needle.length + + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } + + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } + + this.maxMatches = Infinity + this.matches = 0 + + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 + + this._lookbehind = Buffer.alloc(needleLength) + + // Populate occurrence table with analysis of the needle, + // ignoring last letter. 
+ for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i + } +} +inherits(SBMH, EventEmitter) + +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} + +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} + +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) + + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) + + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } + + // No match. + + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } + + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } + + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff + + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len + + this._bufpos = len + return len + } + } + + pos += (pos >= 0) * this._bufpos + + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } + + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } + + // There was no match. 
If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } + + // Everything until pos is guaranteed not to contain needle data. + if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } + + this._bufpos = len + return len +} + +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} + +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} + +module.exports = SBMH + + +/***/ }), + +/***/ 9581: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const WritableStream = (__nccwpck_require__(7075).Writable) +const { inherits } = __nccwpck_require__(7975) +const Dicer = __nccwpck_require__(7182) + +const MultipartParser = __nccwpck_require__(1192) +const UrlencodedParser = __nccwpck_require__(855) +const parseParams = __nccwpck_require__(8929) + +function Busboy (opts) { + if (!(this instanceof Busboy)) { return new Busboy(opts) } + + if (typeof opts !== 'object') { + throw new TypeError('Busboy expected an options-Object.') + } + if (typeof opts.headers !== 'object') { + throw new TypeError('Busboy expected an options-Object with headers-attribute.') + } + if (typeof opts.headers['content-type'] !== 'string') { + throw new TypeError('Missing Content-Type-header.') + } + + const { + headers, + ...streamOptions + } = opts + + this.opts = { + autoDestroy: false, + ...streamOptions + } + WritableStream.call(this, this.opts) + + this._done = false + this._parser = this.getParserByHeaders(headers) + this._finished = false +} +inherits(Busboy, WritableStream) + +Busboy.prototype.emit = function (ev) { + if (ev === 'finish') { + if (!this._done) { + this._parser?.end() + return + } else if (this._finished) { + return + } + this._finished = true + } + WritableStream.prototype.emit.apply(this, arguments) +} + +Busboy.prototype.getParserByHeaders = function (headers) { + const parsed = parseParams(headers['content-type']) + + const cfg = { + defCharset: this.opts.defCharset, + fileHwm: this.opts.fileHwm, + headers, + highWaterMark: this.opts.highWaterMark, + isPartAFile: this.opts.isPartAFile, + limits: this.opts.limits, + parsedConType: parsed, + preservePath: this.opts.preservePath + } + + if (MultipartParser.detect.test(parsed[0])) { + return new MultipartParser(this, cfg) + } + if (UrlencodedParser.detect.test(parsed[0])) { + return new UrlencodedParser(this, cfg) + } + throw new Error('Unsupported Content-Type.') +} + +Busboy.prototype._write = function (chunk, encoding, cb) { + this._parser.write(chunk, cb) +} + +module.exports = Busboy +module.exports["default"] = Busboy +module.exports.Busboy = Busboy + +module.exports.Dicer = Dicer + + +/***/ }), + +/***/ 1192: 
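+ // Editor's sketch (illustrative only, not part of the upstream busboy code):
+ // the Busboy writable above (module 9581) hands multipart/form-data requests
+ // to this parser. A typical caller, assuming an http.IncomingMessage `req`
+ // carrying a multipart Content-Type, would look roughly like:
+ //
+ //   const busboy = new Busboy({ headers: req.headers })
+ //   busboy.on('file', (name, stream, filename, encoding, mimeType) => stream.resume())
+ //   busboy.on('field', (name, value) => console.log(name, value))
+ //   busboy.on('finish', () => console.log('form parsed'))
+ //   req.pipe(busboy)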
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +// TODO: +// * support 1 nested multipart level +// (see second multipart example here: +// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) +// * support limits.fieldNameSize +// -- this will require modifications to utils.parseParams + +const { Readable } = __nccwpck_require__(7075) +const { inherits } = __nccwpck_require__(7975) + +const Dicer = __nccwpck_require__(7182) + +const parseParams = __nccwpck_require__(8929) +const decodeText = __nccwpck_require__(2747) +const basename = __nccwpck_require__(692) +const getLimit = __nccwpck_require__(2393) + +const RE_BOUNDARY = /^boundary$/i +const RE_FIELD = /^form-data$/i +const RE_CHARSET = /^charset$/i +const RE_FILENAME = /^filename$/i +const RE_NAME = /^name$/i + +Multipart.detect = /^multipart\/form-data/i +function Multipart (boy, cfg) { + let i + let len + const self = this + let boundary + const limits = cfg.limits + const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) + const parsedConType = cfg.parsedConType || [] + const defCharset = cfg.defCharset || 'utf8' + const preservePath = cfg.preservePath + const fileOpts = { highWaterMark: cfg.fileHwm } + + for (i = 0, len = parsedConType.length; i < len; ++i) { + if (Array.isArray(parsedConType[i]) && + RE_BOUNDARY.test(parsedConType[i][0])) { + boundary = parsedConType[i][1] + break + } + } + + function checkFinished () { + if (nends === 0 && finished && !boy._done) { + finished = false + self.end() + } + } + + if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } + + const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) + const filesLimit = getLimit(limits, 'files', Infinity) + const fieldsLimit = getLimit(limits, 'fields', Infinity) + const partsLimit = getLimit(limits, 'parts', Infinity) + const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) + const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) + + let nfiles = 0 + let nfields = 0 + let nends = 0 + let curFile + let curField + let finished = false + + this._needDrain = false + this._pause = false + this._cb = undefined + this._nparts = 0 + this._boy = boy + + const parserCfg = { + boundary, + maxHeaderPairs: headerPairsLimit, + maxHeaderSize: headerSizeLimit, + partHwm: fileOpts.highWaterMark, + highWaterMark: cfg.highWaterMark + } + + this.parser = new Dicer(parserCfg) + this.parser.on('drain', function () { + self._needDrain = false + if (self._cb && !self._pause) { + const cb = self._cb + self._cb = undefined + cb() + } + }).on('part', function onPart (part) { + if (++self._nparts > partsLimit) { + self.parser.removeListener('part', onPart) + self.parser.on('part', skipPart) + boy.hitPartsLimit = true + boy.emit('partsLimit') + return skipPart(part) + } + + // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let + // us emit 'end' early since we know the part has ended if we are already + // seeing the next part + if (curField) { + const field = curField + field.emit('end') + field.removeAllListeners('end') + } + + part.on('header', function (header) { + let contype + let fieldname + let parsed + let charset + let encoding + let filename + let nsize = 0 + + if (header['content-type']) { + parsed = parseParams(header['content-type'][0]) + if (parsed[0]) { + contype = 
parsed[0].toLowerCase() + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_CHARSET.test(parsed[i][0])) { + charset = parsed[i][1].toLowerCase() + break + } + } + } + } + + if (contype === undefined) { contype = 'text/plain' } + if (charset === undefined) { charset = defCharset } + + if (header['content-disposition']) { + parsed = parseParams(header['content-disposition'][0]) + if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_NAME.test(parsed[i][0])) { + fieldname = parsed[i][1] + } else if (RE_FILENAME.test(parsed[i][0])) { + filename = parsed[i][1] + if (!preservePath) { filename = basename(filename) } + } + } + } else { return skipPart(part) } + + if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } + + let onData, + onEnd + + if (isPartAFile(fieldname, contype, filename)) { + // file/binary field + if (nfiles === filesLimit) { + if (!boy.hitFilesLimit) { + boy.hitFilesLimit = true + boy.emit('filesLimit') + } + return skipPart(part) + } + + ++nfiles + + if (boy.listenerCount('file') === 0) { + self.parser._ignore() + return + } + + ++nends + const file = new FileStream(fileOpts) + curFile = file + file.on('end', function () { + --nends + self._pause = false + checkFinished() + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + }) + file._read = function (n) { + if (!self._pause) { return } + self._pause = false + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + } + boy.emit('file', fieldname, file, filename, encoding, contype) + + onData = function (data) { + if ((nsize += data.length) > fileSizeLimit) { + const extralen = fileSizeLimit - nsize + data.length + if (extralen > 0) { file.push(data.slice(0, extralen)) } + file.truncated = true + file.bytesRead = fileSizeLimit + part.removeAllListeners('data') + file.emit('limit') + return + } else if (!file.push(data)) { self._pause = true } + + file.bytesRead = nsize + } + + onEnd = function () { + curFile = undefined + file.push(null) + } + } else { + // non-file field + if (nfields === fieldsLimit) { + if (!boy.hitFieldsLimit) { + boy.hitFieldsLimit = true + boy.emit('fieldsLimit') + } + return skipPart(part) + } + + ++nfields + ++nends + let buffer = '' + let truncated = false + curField = part + + onData = function (data) { + if ((nsize += data.length) > fieldSizeLimit) { + const extralen = (fieldSizeLimit - (nsize - data.length)) + buffer += data.toString('binary', 0, extralen) + truncated = true + part.removeAllListeners('data') + } else { buffer += data.toString('binary') } + } + + onEnd = function () { + curField = undefined + if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } + boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) + --nends + checkFinished() + } + } + + /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become + broken. Streams2/streams3 is a huge black box of confusion, but + somehow overriding the sync state seems to fix things again (and still + seems to work for previous node versions). 
+ */ + part._readableState.sync = false + + part.on('data', onData) + part.on('end', onEnd) + }).on('error', function (err) { + if (curFile) { curFile.emit('error', err) } + }) + }).on('error', function (err) { + boy.emit('error', err) + }).on('finish', function () { + finished = true + checkFinished() + }) +} + +Multipart.prototype.write = function (chunk, cb) { + const r = this.parser.write(chunk) + if (r && !this._pause) { + cb() + } else { + this._needDrain = !r + this._cb = cb + } +} + +Multipart.prototype.end = function () { + const self = this + + if (self.parser.writable) { + self.parser.end() + } else if (!self._boy._done) { + process.nextTick(function () { + self._boy._done = true + self._boy.emit('finish') + }) + } +} + +function skipPart (part) { + part.resume() +} + +function FileStream (opts) { + Readable.call(this, opts) + + this.bytesRead = 0 + + this.truncated = false +} + +inherits(FileStream, Readable) + +FileStream.prototype._read = function (n) {} + +module.exports = Multipart + + +/***/ }), + +/***/ 855: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +const Decoder = __nccwpck_require__(1496) +const decodeText = __nccwpck_require__(2747) +const getLimit = __nccwpck_require__(2393) + +const RE_CHARSET = /^charset$/i + +UrlEncoded.detect = /^application\/x-www-form-urlencoded/i +function UrlEncoded (boy, cfg) { + const limits = cfg.limits + const parsedConType = cfg.parsedConType + this.boy = boy + + this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) + this.fieldsLimit = getLimit(limits, 'fields', Infinity) + + let charset + for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var + if (Array.isArray(parsedConType[i]) && + RE_CHARSET.test(parsedConType[i][0])) { + charset = parsedConType[i][1].toLowerCase() + break + } + } + + if (charset === undefined) { charset = cfg.defCharset || 'utf8' } + + this.decoder = new Decoder() + this.charset = charset + this._fields = 0 + this._state = 'key' + this._checkingBytes = true + this._bytesKey = 0 + this._bytesVal = 0 + this._key = '' + this._val = '' + this._keyTrunc = false + this._valTrunc = false + this._hitLimit = false +} + +UrlEncoded.prototype.write = function (data, cb) { + if (this._fields === this.fieldsLimit) { + if (!this.boy.hitFieldsLimit) { + this.boy.hitFieldsLimit = true + this.boy.emit('fieldsLimit') + } + return cb() + } + + let idxeq; let idxamp; let i; let p = 0; const len = data.length + + while (p < len) { + if (this._state === 'key') { + idxeq = idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x3D/* = */) { + idxeq = i + break + } else if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesKey } + } + + if (idxeq !== undefined) { + // key with assignment + if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } + this._state = 'val' + + this._hitLimit = false + this._checkingBytes = true + this._val = '' + this._bytesVal = 0 + this._valTrunc = false + this.decoder.reset() + + p = idxeq + 1 + } else if (idxamp !== undefined) { + // key with no assignment + ++this._fields + let key; const keyTrunc = this._keyTrunc + if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } 
else { key = this._key } + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + if (key.length) { + this.boy.emit('field', decodeText(key, 'binary', this.charset), + '', + keyTrunc, + false) + } + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... + if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._keyTrunc = true + } + } else { + if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } + p = len + } + } else { + idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesVal } + } + + if (idxamp !== undefined) { + ++this._fields + if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + this._state = 'key' + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... 
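+ // (e.g. '%20' is three raw bytes on the wire but decodes to a single space,
+ // so the decoded value may still be shorter than fieldSizeLimit; re-check
+ // against the decoded length below)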
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._val === '' && this.fieldSizeLimit === 0) || + (this._bytesVal = this._val.length) === this.fieldSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._valTrunc = true + } + } else { + if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } + p = len + } + } + } + cb() +} + +UrlEncoded.prototype.end = function () { + if (this.boy._done) { return } + + if (this._state === 'key' && this._key.length > 0) { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + '', + this._keyTrunc, + false) + } else if (this._state === 'val') { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + } + this.boy._done = true + this.boy.emit('finish') +} + +module.exports = UrlEncoded + + +/***/ }), + +/***/ 1496: +/***/ ((module) => { + +"use strict"; + + +const RE_PLUS = /\+/g + +const HEX = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +] + +function Decoder () { + this.buffer = undefined +} +Decoder.prototype.write = function (str) { + // Replace '+' with ' ' before decoding + str = str.replace(RE_PLUS, ' ') + let res = '' + let i = 0; let p = 0; const len = str.length + for (; i < len; ++i) { + if (this.buffer !== undefined) { + if (!HEX[str.charCodeAt(i)]) { + res += '%' + this.buffer + this.buffer = undefined + --i // retry character + } else { + this.buffer += str[i] + ++p + if (this.buffer.length === 2) { + res += String.fromCharCode(parseInt(this.buffer, 16)) + this.buffer = undefined + } + } + } else if (str[i] === '%') { + if (i > p) { + res += str.substring(p, i) + p = i + } + this.buffer = '' + ++p + } + } + if (p < len && this.buffer === undefined) { res += str.substring(p) } + return res +} +Decoder.prototype.reset = function () { + this.buffer = undefined +} + +module.exports = Decoder + + +/***/ }), + +/***/ 692: +/***/ ((module) => { + +"use strict"; + + +module.exports = function basename (path) { + if (typeof path !== 'string') { return '' } + for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1) + return (path === '..' || path === '.' ? '' : path) + } + } + return (path === '..' || path === '.' ? '' : path) +} + + +/***/ }), + +/***/ 2747: +/***/ (function(module) { + +"use strict"; + + +// Node has always utf-8 +const utf8Decoder = new TextDecoder('utf-8') +const textDecoders = new Map([ + ['utf-8', utf8Decoder], + ['utf8', utf8Decoder] +]) + +function getDecoder (charset) { + let lc + while (true) { + switch (charset) { + case 'utf-8': + case 'utf8': + return decoders.utf8 + case 'latin1': + case 'ascii': // TODO: Make these a separate, strict decoder? 
+ case 'us-ascii': + case 'iso-8859-1': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'windows-1252': + case 'iso_8859-1:1987': + case 'cp1252': + case 'x-cp1252': + return decoders.latin1 + case 'utf16le': + case 'utf-16le': + case 'ucs2': + case 'ucs-2': + return decoders.utf16le + case 'base64': + return decoders.base64 + default: + if (lc === undefined) { + lc = true + charset = charset.toLowerCase() + continue + } + return decoders.other.bind(charset) + } + } +} + +const decoders = { + utf8: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.utf8Slice(0, data.length) + }, + + latin1: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + return data + } + return data.latin1Slice(0, data.length) + }, + + utf16le: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.ucs2Slice(0, data.length) + }, + + base64: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.base64Slice(0, data.length) + }, + + other: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + + if (textDecoders.has(this.toString())) { + try { + return textDecoders.get(this).decode(data) + } catch {} + } + return typeof data === 'string' + ? data + : data.toString() + } +} + +function decodeText (text, sourceEncoding, destEncoding) { + if (text) { + return getDecoder(destEncoding)(text, sourceEncoding) + } + return text +} + +module.exports = decodeText + + +/***/ }), + +/***/ 2393: +/***/ ((module) => { + +"use strict"; + + +module.exports = function getLimit (limits, name, defaultLimit) { + if ( + !limits || + limits[name] === undefined || + limits[name] === null + ) { return defaultLimit } + + if ( + typeof limits[name] !== 'number' || + isNaN(limits[name]) + ) { throw new TypeError('Limit ' + name + ' is not a valid number') } + + return limits[name] +} + + +/***/ }), + +/***/ 8929: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; +/* eslint-disable object-property-newline */ + + +const decodeText = __nccwpck_require__(2747) + +const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g + +const EncodedLookup = { + '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', + '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', + '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', + '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', + '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', + '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', + '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', + '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', + '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', + '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', + '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', + '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', + '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': 
'\x2f', + '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', + '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', + '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', + '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e', + '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', + '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', + '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', + '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', + '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', + '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', + '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', + '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', + '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', + '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', + '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', + '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', + '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d', + '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70', + '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', + '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', + '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', + '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', + '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', + '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', + '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', + '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', + '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', + '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', + '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a', + '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', + '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', + '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', + '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', + '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', + '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', + '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', + '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', + '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', + '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', + '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', + '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', + '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', + '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', + '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', + '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', + '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', + 
'%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', + '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', + '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', + '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', + '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', + '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', + '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', + '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', + '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', + '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', + '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', + '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', + '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', + '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', + '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', + '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', + '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb', + '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd', + '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', + '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', + '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', + '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', + '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', + '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', + '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', + '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', + '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', + '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', + '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef', + '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', + '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', + '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', + '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', + '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', + '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', + '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', + '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', + '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' +} + +function encodedReplacer (match) { + return EncodedLookup[match] +} + +const STATE_KEY = 0 +const STATE_VALUE = 1 +const STATE_CHARSET = 2 +const STATE_LANG = 3 + +function parseParams (str) { + const res = [] + let state = STATE_KEY + let charset = '' + let inquote = false + let escaping = false + let p = 0 + let tmp = '' + const len = str.length + + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + const char = str[i] + if (char === '\\' && inquote) { + if (escaping) { escaping = false } else { + escaping = true + continue + } + } else if (char === '"') { + if (!escaping) { + if (inquote) { + inquote = false + state 
= STATE_KEY + } else { inquote = true } + continue + } else { escaping = false } + } else { + if (escaping && inquote) { tmp += '\\' } + escaping = false + if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { + if (state === STATE_CHARSET) { + state = STATE_LANG + charset = tmp.substring(1) + } else { state = STATE_VALUE } + tmp = '' + continue + } else if (state === STATE_KEY && + (char === '*' || char === '=') && + res.length) { + state = char === '*' + ? STATE_CHARSET + : STATE_VALUE + res[p] = [tmp, undefined] + tmp = '' + continue + } else if (!inquote && char === ';') { + state = STATE_KEY + if (charset) { + if (tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } + charset = '' + } else if (tmp.length) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } + tmp = '' + ++p + continue + } else if (!inquote && (char === ' ' || char === '\t')) { continue } + } + tmp += char + } + if (charset && tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } else if (tmp) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + + if (res[p] === undefined) { + if (tmp) { res[p] = tmp } + } else { res[p][1] = tmp } + + return res +} + +module.exports = parseParams + + +/***/ }), + +/***/ 2981: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Glob = void 0; +const minimatch_1 = __nccwpck_require__(6507); +const node_url_1 = __nccwpck_require__(3136); +const path_scurry_1 = __nccwpck_require__(6577); +const pattern_js_1 = __nccwpck_require__(7813); +const walker_js_1 = __nccwpck_require__(1157); +// if no process global, just call it linux. +// so we default to case-sensitive, / separators +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + includeChildMatches; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. 
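+ *
+ * A rough usage sketch (illustrative, not taken from the upstream docs):
+ *
+ *   const g = new Glob('**/*.yaml', { nodir: true })
+ *   const files = await g.walk() // array of matching path strings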
+ */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = (0, node_url_1.fileURLToPath)(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.includeChildMatches = opts.includeChildMatches !== false; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === + false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' ? path_scurry_1.PathScurryWin32 + : opts.platform === 'darwin' ? path_scurry_1.PathScurryDarwin + : opts.platform ? path_scurry_1.PathScurryPosix + : path_scurry_1.PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. 
+ const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + const g = globParts[i]; + /* c8 ignore start */ + if (!g) + throw new Error('invalid pattern object'); + /* c8 ignore stop */ + return new pattern_js_1.Pattern(set, g, 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walk()), + ]; + } + walkSync() { + return [ + ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walkSync(), + ]; + } + stream() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).stream(); + } + streamSync() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +exports.Glob = Glob; +//# sourceMappingURL=glob.js.map + +/***/ }), + +/***/ 5197: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.hasMagic = void 0; +const minimatch_1 = __nccwpck_require__(6507); +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. 
+ * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new minimatch_1.Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +exports.hasMagic = hasMagic; +//# sourceMappingURL=has-magic.js.map + +/***/ }), + +/***/ 5637: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Ignore = void 0; +const minimatch_1 = __nccwpck_require__(6507); +const pattern_js_1 = __nccwpck_require__(7813); +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + platform; + mmopts; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + this.platform = platform; + this.mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + for (const ign of ignored) + this.add(ign); + } + add(ign) { + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. + const mm = new minimatch_1.Minimatch(ign, this.mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + /* c8 ignore start */ + if (!parsed || !globParts) { + throw new Error('invalid pattern object'); + } + // strip off leading ./ portions + // https://github.com/isaacs/node-glob/issues/570 + while (parsed[0] === '.' 
&& globParts[0] === '.') { + parsed.shift(); + globParts.shift(); + } + /* c8 ignore stop */ + const p = new pattern_js_1.Pattern(parsed, globParts, 0, this.platform); + const m = new minimatch_1.Minimatch(p.globString(), this.mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + return true; + } + return false; + } +} +exports.Ignore = Ignore; +//# sourceMappingURL=ignore.js.map + +/***/ }), + +/***/ 1363: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.glob = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.Ignore = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = void 0; +exports.globStreamSync = globStreamSync; +exports.globStream = globStream; +exports.globSync = globSync; +exports.globIterateSync = globIterateSync; +exports.globIterate = globIterate; +const minimatch_1 = __nccwpck_require__(6507); +const glob_js_1 = __nccwpck_require__(2981); +const has_magic_js_1 = __nccwpck_require__(5197); +var minimatch_2 = __nccwpck_require__(6507); +Object.defineProperty(exports, "escape", ({ enumerable: true, get: function () { return minimatch_2.escape; } })); +Object.defineProperty(exports, "unescape", ({ enumerable: true, get: function () { return minimatch_2.unescape; } })); +var glob_js_2 = __nccwpck_require__(2981); +Object.defineProperty(exports, "Glob", ({ enumerable: true, get: function () { return glob_js_2.Glob; } })); +var has_magic_js_2 = __nccwpck_require__(5197); +Object.defineProperty(exports, "hasMagic", ({ enumerable: true, get: function () { return has_magic_js_2.hasMagic; } })); +var ignore_js_1 = __nccwpck_require__(5637); +Object.defineProperty(exports, "Ignore", ({ enumerable: true, get: function () { return ignore_js_1.Ignore; } })); +function globStreamSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).streamSync(); +} +function globStream(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).stream(); +} +function globSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walkSync(); +} +async function glob_(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walk(); +} +function globIterateSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterateSync(); +} +function globIterate(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterate(); +} +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +exports.streamSync = globStreamSync; +exports.stream = 
Object.assign(globStream, { sync: globStreamSync }); +exports.iterateSync = globIterateSync; +exports.iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +exports.sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +exports.glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync: exports.sync, + globStream, + stream: exports.stream, + globStreamSync, + streamSync: exports.streamSync, + globIterate, + iterate: exports.iterate, + globIterateSync, + iterateSync: exports.iterateSync, + Glob: glob_js_1.Glob, + hasMagic: has_magic_js_1.hasMagic, + escape: minimatch_1.escape, + unescape: minimatch_1.unescape, +}); +exports.glob.glob = exports.glob; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7813: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// this is just a very light wrapper around 2 arrays with an offset index +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Pattern = void 0; +const minimatch_1 = __nccwpck_require__(6507); +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root entries of absolute patterns on initial creation. 
+ if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true of if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === minimatch_1.GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 ? + this.isAbsolute() ? + this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined ? + this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined ? + this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' + // split = ['', ''] or ['', ...] or ['', 'x', ...] 
+ // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined ? + this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ? + p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. + */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=pattern.js.map + +/***/ }), + +/***/ 7843: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// synchronous utility for filtering entries and calculating subwalks +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0; +const minimatch_1 = __nccwpck_require__(6507); +/** + * A cache of which patterns have been processed for a given Path + */ +class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +exports.HasWalkedCache = HasWalkedCache; +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +exports.MatchRecord = MatchRecord; +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. 
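+ * (Roughly: `add(target, pattern)` queues `pattern` to be matched against the
+ * children of `target` once that directory is read later in the walk.)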
+ */ +class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +exports.SubWalks = SubWalks; +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = + hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined ? + this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must not be final entry, otherwise we would have + // concatenated it earlier. + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + continue; + } + else if (p === minimatch_1.GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. 
at the fs root, + // and no thanks, I'm not gonna test that specific case. + /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === minimatch_1.GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? + if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +exports.Processor = Processor; +//# sourceMappingURL=processor.js.map + +/***/ }), + +/***/ 1157: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0; +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. 
+ * + * @module + */ +const minipass_1 = __nccwpck_require__(8275); +const ignore_js_1 = __nccwpck_require__(5637); +const processor_js_1 = __nccwpck_require__(7843); +const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new ignore_js_1.Ignore([ignore], opts) + : Array.isArray(ignore) ? new ignore_js_1.Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + includeChildMatches; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + this.includeChildMatches = opts.includeChildMatches !== false; + if (opts.ignore || !this.includeChildMatches) { + this.#ignore = makeIgnore(opts.ignore ?? [], opts); + if (!this.includeChildMatches && + typeof this.#ignore.add !== 'function') { + const m = 'cannot ignore child matches, ignore lacks add() method.'; + throw new Error(m); + } + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? await e.lstat() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = await s.realpath(); + /* c8 ignore start */ + if (target && (target.isUnknown() || this.opts.stat)) { + await target.lstat(); + } + /* c8 ignore stop */ + } + return this.matchCheckTest(s, ifDir); + } + matchCheckTest(e, ifDir) { + return (e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + (!this.opts.nodir || + !this.opts.follow || + !e.isSymbolicLink() || + !e.realpathCached()?.isDirectory()) && + !this.#ignored(e)) ? + e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? 
e.lstatSync() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = s.realpathSync(); + if (target && (target?.isUnknown() || this.opts.stat)) { + target.lstatSync(); + } + } + return this.matchCheckTest(s, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + // we know we have an ignore if this is false, but TS doesn't + if (!this.includeChildMatches && this.#ignore?.add) { + const ign = `${e.relativePosix()}/**`; + this.#ignore.add(ign); + } + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ? + '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +exports.GlobUtil = GlobUtil; +class GlobWalker extends GlobUtil { + matches = new Set(); + constructor(patterns, path, opts) { + super(patterns, path, opts); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +exports.GlobWalker = GlobWalker; +class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new minipass_1.Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +exports.GlobStream = GlobStream; +//# sourceMappingURL=walker.js.map + +/***/ }), + +/***/ 2477: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +/** + * @module LRUCache + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.LRUCache = void 0; +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? 
PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). ' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * The `K` and `V` types define the key and value types, respectively. The + * optional `FC` type defines the type of the `context` object passed to + * `cache.fetch()` and `cache.memo()`. + * + * Keys and values **must not** be `null` or `undefined`. 
+ * + * All properties from the options object (with the exception of `max`, + * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are + * added as normal public members. (The listed options are read-only getters.) + * + * Changing any of these will alter the defaults for subsequent method calls. + */ +class LRUCache { + // options that cannot be changed without disaster + #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + #memoMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! 
+ * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + get memoMethod() { + return this.#memoMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? 
getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (memoMethod !== undefined && + typeof memoMethod !== 'function') { + throw new TypeError('memoMethod must be a function if defined'); + } + this.#memoMethod = memoMethod; + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the number of ms left in the item's TTL. If item is not in cache, + * returns `0`. Returns `Infinity` if item is in cache without a defined TTL. + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? 
Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.#delete(this.#keyList[index], 'expire'); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + /* c8 ignore next */ + if (!ttl || !start) + return; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (!ttl || !start) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + const s = starts[index]; + const t = ttls[index]; + return !!t && !!s && (cachedNow || getNow()) - s > t; + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * A String value that is used in the creation of the default string + * description of an object. Called by the built-in method + * `Object.prototype.toString`. + */ + [Symbol.toStringTag] = 'LRUCache'; + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to `Array.find()`. fn is called as `fn(value, key, cache)`. + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from most + * recently used to least recently used. + * + * `fn` is called as `fn(value, key, cache)`. + * + * If `thisp` is provided, function will be called in the `this`-context of + * the provided object, or the cache if no `thisp` object is provided. + * + * Does not update age or recenty of use, or iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.#delete(this.#keyList[i], 'expire'); + deleted = true; + } + } + return deleted; + } + /** + * Get the extended info about a given entry, to get its value, size, and + * TTL info simultaneously. Returns `undefined` if the key is not present. + * + * Unlike {@link LRUCache#dump}, which is designed to be portable and survive + * serialization, the `start` value is always the current timestamp, and the + * `ttl` is a calculated remaining time to live (negative if expired). + * + * Always returns stale values, if their info is found in the cache, so be + * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl}) + * if relevant. + */ + info(key) { + const i = this.#keyMap.get(key); + if (i === undefined) + return undefined; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v; + if (value === undefined) + return undefined; + const entry = { value }; + if (this.#ttls && this.#starts) { + const ttl = this.#ttls[i]; + const start = this.#starts[i]; + if (ttl && start) { + const remain = ttl - (perf.now() - start); + entry.ttl = remain; + entry.start = Date.now(); + } + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + return entry; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to {@link LRUCache#load}. + * + * The `start` fields are calculated relative to a portable `Date.now()` + * timestamp, even if `performance.now()` is available. + * + * Stale entries are always included in the `dump`, even if + * {@link LRUCache.OptionsBase.allowStale} is false. + * + * Note: this returns an actual array, not a generator, so it can be more + * easily passed around. + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * + * The shape of the resulting cache may be different if the same options are + * not used in both caches. + * + * The `start` fields are assumed to be calculated relative to a portable + * `Date.now()` timestamp, even if `performance.now()` is available. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + * + * Fields on the {@link LRUCache.SetOptions} options param will override + * their corresponding values in the constructor options for the scope + * of this single `set()` operation. + * + * If `start` is provided, then that will set the effective start + * time for the TTL calculation. Note that this must be a previous + * value of `performance.now()` if supported, or a previous value of + * `Date.now()` if not. + * + * Options object may also include `size`, which will prevent + * calling the `sizeCalculation` function and just use the specified + * number if it is a positive integer, and `noDisposeOnSet` which + * will prevent calling a `dispose` function in the case of + * overwrites. + * + * If the `size` (or return value of `sizeCalculation`) for a given + * entry is greater than `maxEntrySize`, then the item will not be + * added to the cache. + * + * Will update the recency of the entry. 
+ * + * If the value is `undefined`, then this is an alias for + * `cache.delete(key)`. `undefined` is never stored in the cache. + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.#delete(k, 'set'); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Check if a key is in the cache, without updating the recency of + * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set + * to `true` in either the options or the constructor. + * + * Will return `false` if the item is stale, even though it is technically in + * the cache. The difference can be determined (if it matters) by using a + * `status` argument, and inspecting the `has` field. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index === undefined || + (!allowStale && this.#isStale(index))) { + return; + } + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.#delete(k, 'fetch'); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.#delete(k, 'fetch'); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + async forceFetch(k, fetchOptions = {}) { + const v = await this.fetch(k, fetchOptions); + if (v === undefined) + throw new Error('fetch() returned undefined'); + return v; + } + memo(k, memoOptions = {}) { + const memoMethod = this.#memoMethod; + if (!memoMethod) { + throw new Error('no memoMethod provided to constructor'); + } + const { context, forceRefresh, ...options } = memoOptions; + const v = this.get(k, options); + if (!forceRefresh && v !== undefined) + return v; + const vv = memoMethod(k, v, { + options, + context, + }); + this.set(k, vv, options); + return vv; + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. 
+ * + * If the key is not found, get() will return `undefined`. + */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.#delete(k, 'expire'); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + return this.#delete(k, 'delete'); + } + #delete(k, reason) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.#clear(reason); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, reason); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, reason]); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + const pi = this.#prev[index]; + this.#next[pi] = this.#next[index]; + const ni = this.#next[index]; + this.#prev[ni] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
+ */ + clear() { + return this.#clear('delete'); + } + #clear(reason) { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, reason); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, reason]); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +exports.LRUCache = LRUCache; +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 7305: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.assertValidPattern = void 0; +const MAX_PATTERN_LENGTH = 1024 * 64; +const assertValidPattern = (pattern) => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern'); + } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long'); + } +}; +exports.assertValidPattern = assertValidPattern; +//# sourceMappingURL=assert-valid-pattern.js.map + +/***/ }), + +/***/ 1803: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +// parse a single path portion +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.AST = void 0; +const brace_expressions_js_1 = __nccwpck_require__(1090); +const unescape_js_1 = __nccwpck_require__(851); +const types = new Set(['!', '?', '+', '*', '@']); +const isExtglobType = (c) => types.has(c); +// Patterns that get prepended to bind to the start of either the +// entire string, or just a single path portion, to prevent dots +// and/or traversal patterns, when needed. +// Exts don't need the ^ or / bit, because the root binds that already. +const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))'; +const startNoDot = '(?!\\.)'; +// characters that indicate a start of pattern needs the "no dots" bit, +// because a dot *might* be matched. ( is not in the list, because in +// the case of a child extglob, it will handle the prevention itself. +const addPatternStart = new Set(['[', '.']); +// cases where traversal is A-OK, no dot prevention needed +const justDots = new Set(['..', '.']); +const reSpecials = new Set('().*{}+?[]^$\\!'); +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// any single thing other than / +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// use + when we need to ensure that *something* matches, because the * is +// the only thing in the path portion. 
+const starNoEmpty = qmark + '+?'; +// remove the \ chars that we added if we end up doing a nonmagic compare +// const deslash = (s: string) => s.replace(/\\(.)/g, '$1') +class AST { + type; + #root; + #hasMagic; + #uflag = false; + #parts = []; + #parent; + #parentIndex; + #negs; + #filledNegs = false; + #options; + #toString; + // set to true if it's an extglob with no children + // (which really means one child of '') + #emptyExt = false; + constructor(type, parent, options = {}) { + this.type = type; + // extglobs are inherently magical + if (type) + this.#hasMagic = true; + this.#parent = parent; + this.#root = this.#parent ? this.#parent.#root : this; + this.#options = this.#root === this ? options : this.#root.#options; + this.#negs = this.#root === this ? [] : this.#root.#negs; + if (type === '!' && !this.#root.#filledNegs) + this.#negs.push(this); + this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0; + } + get hasMagic() { + /* c8 ignore start */ + if (this.#hasMagic !== undefined) + return this.#hasMagic; + /* c8 ignore stop */ + for (const p of this.#parts) { + if (typeof p === 'string') + continue; + if (p.type || p.hasMagic) + return (this.#hasMagic = true); + } + // note: will be undefined until we generate the regexp src and find out + return this.#hasMagic; + } + // reconstructs the pattern + toString() { + if (this.#toString !== undefined) + return this.#toString; + if (!this.type) { + return (this.#toString = this.#parts.map(p => String(p)).join('')); + } + else { + return (this.#toString = + this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')'); + } + } + #fillNegs() { + /* c8 ignore start */ + if (this !== this.#root) + throw new Error('should only call on root'); + if (this.#filledNegs) + return this; + /* c8 ignore stop */ + // call toString() once to fill this out + this.toString(); + this.#filledNegs = true; + let n; + while ((n = this.#negs.pop())) { + if (n.type !== '!') + continue; + // walk up the tree, appending everthing that comes AFTER parentIndex + let p = n; + let pp = p.#parent; + while (pp) { + for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { + for (const part of n.#parts) { + /* c8 ignore start */ + if (typeof part === 'string') { + throw new Error('string part in extglob AST??'); + } + /* c8 ignore stop */ + part.copyIn(pp.#parts[i]); + } + } + p = pp; + pp = p.#parent; + } + } + return this; + } + push(...parts) { + for (const p of parts) { + if (p === '') + continue; + /* c8 ignore start */ + if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) { + throw new Error('invalid part: ' + p); + } + /* c8 ignore stop */ + this.#parts.push(p); + } + } + toJSON() { + const ret = this.type === null + ? this.#parts.slice().map(p => (typeof p === 'string' ? 
p : p.toJSON())) + : [this.type, ...this.#parts.map(p => p.toJSON())]; + if (this.isStart() && !this.type) + ret.unshift([]); + if (this.isEnd() && + (this === this.#root || + (this.#root.#filledNegs && this.#parent?.type === '!'))) { + ret.push({}); + } + return ret; + } + isStart() { + if (this.#root === this) + return true; + // if (this.type) return !!this.#parent?.isStart() + if (!this.#parent?.isStart()) + return false; + if (this.#parentIndex === 0) + return true; + // if everything AHEAD of this is a negation, then it's still the "start" + const p = this.#parent; + for (let i = 0; i < this.#parentIndex; i++) { + const pp = p.#parts[i]; + if (!(pp instanceof AST && pp.type === '!')) { + return false; + } + } + return true; + } + isEnd() { + if (this.#root === this) + return true; + if (this.#parent?.type === '!') + return true; + if (!this.#parent?.isEnd()) + return false; + if (!this.type) + return this.#parent?.isEnd(); + // if not root, it'll always have a parent + /* c8 ignore start */ + const pl = this.#parent ? this.#parent.#parts.length : 0; + /* c8 ignore stop */ + return this.#parentIndex === pl - 1; + } + copyIn(part) { + if (typeof part === 'string') + this.push(part); + else + this.push(part.clone(this)); + } + clone(parent) { + const c = new AST(this.type, parent); + for (const p of this.#parts) { + c.copyIn(p); + } + return c; + } + static #parseAST(str, ast, pos, opt) { + let escaping = false; + let inBrace = false; + let braceStart = -1; + let braceNeg = false; + if (ast.type === null) { + // outside of a extglob, append until we find a start + let i = pos; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') { + ast.push(acc); + acc = ''; + const ext = new AST(c, ast); + i = AST.#parseAST(str, ext, i, opt); + ast.push(ext); + continue; + } + acc += c; + } + ast.push(acc); + return i; + } + // some kind of extglob, pos is at the ( + // find the next | or ) + let i = pos + 1; + let part = new AST(null, ast); + const parts = []; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (isExtglobType(c) && str.charAt(i) === '(') { + part.push(acc); + acc = ''; + const ext = new AST(c, part); + part.push(ext); + i = AST.#parseAST(str, ext, i, opt); + continue; + } + if (c === '|') { + part.push(acc); + acc = ''; + parts.push(part); + part = new AST(null, ast); + continue; + } + if (c === ')') { + if (acc === '' && ast.#parts.length === 0) { + ast.#emptyExt = true; + } + 
part.push(acc); + acc = ''; + ast.push(...parts, part); + return i; + } + acc += c; + } + // unfinished extglob + // if we got here, it was a malformed extglob! not an extglob, but + // maybe something else in there. + ast.type = null; + ast.#hasMagic = undefined; + ast.#parts = [str.substring(pos - 1)]; + return i; + } + static fromGlob(pattern, options = {}) { + const ast = new AST(null, undefined, options); + AST.#parseAST(pattern, ast, 0, options); + return ast; + } + // returns the regular expression if there's magic, or the unescaped + // string if not. + toMMPattern() { + // should only be called on root + /* c8 ignore start */ + if (this !== this.#root) + return this.#root.toMMPattern(); + /* c8 ignore stop */ + const glob = this.toString(); + const [re, body, hasMagic, uflag] = this.toRegExpSource(); + // if we're in nocase mode, and not nocaseMagicOnly, then we do + // still need a regular expression if we have to case-insensitively + // match capital/lowercase characters. + const anyMagic = hasMagic || + this.#hasMagic || + (this.#options.nocase && + !this.#options.nocaseMagicOnly && + glob.toUpperCase() !== glob.toLowerCase()); + if (!anyMagic) { + return body; + } + const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : ''); + return Object.assign(new RegExp(`^${re}$`, flags), { + _src: re, + _glob: glob, + }); + } + get options() { + return this.#options; + } + // returns the string match, the regexp source, whether there's magic + // in the regexp (so a regular expression is required) and whether or + // not the uflag is needed for the regular expression (for posix classes) + // TODO: instead of injecting the start/end at this point, just return + // the BODY of the regexp, along with the start/end portions suitable + // for binding the start/end in either a joined full-path makeRe context + // (where we bind to (^|/), or a standalone matchPart context (where + // we bind to ^, and not /). Otherwise slashes get duped! + // + // In part-matching mode, the start is: + // - if not isStart: nothing + // - if traversal possible, but not allowed: ^(?!\.\.?$) + // - if dots allowed or not possible: ^ + // - if dots possible and not allowed: ^(?!\.) + // end is: + // - if not isEnd(): nothing + // - else: $ + // + // In full-path matching mode, we put the slash at the START of the + // pattern, so start is: + // - if first pattern: same as part-matching mode + // - if not isStart(): nothing + // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) + // - if dots allowed or not possible: / + // - if dots possible and not allowed: /(?!\.) + // end is: + // - if last pattern, same as part-matching mode + // - else nothing + // + // Always put the (?:$|/) on negated tails, though, because that has to be + // there to bind the end of the negated pattern portion, and it's easier to + // just stick it in now rather than try to inject it later in the middle of + // the pattern. + // + // We can just always return the same end, and leave it up to the caller + // to know whether it's going to be used joined or in parts. + // And, if the start is adjusted slightly, can do the same there: + // - if not isStart: nothing + // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) + // - if dots allowed or not possible: (?:/|^) + // - if dots possible and not allowed: (?:/|^)(?!\.) + // + // But it's better to have a simpler binding without a conditional, for + // performance, so probably better to return both start options. 
+ // + // Then the caller just ignores the end if it's not the first pattern, + // and the start always gets applied. + // + // But that's always going to be $ if it's the ending pattern, or nothing, + // so the caller can just attach $ at the end of the pattern when building. + // + // So the todo is: + // - better detect what kind of start is needed + // - return both flavors of starting pattern + // - attach $ at the end of the pattern when creating the actual RegExp + // + // Ah, but wait, no, that all only applies to the root when the first pattern + // is not an extglob. If the first pattern IS an extglob, then we need all + // that dot prevention biz to live in the extglob portions, because eg + // +(*|.x*) can match .xy but not .yx. + // + // So, return the two flavors if it's #root and the first child is not an + // AST, otherwise leave it to the child AST to handle it, and there, + // use the (?:^|/) style of start binding. + // + // Even simplified further: + // - Since the start for a join is eg /(?!\.) and the start for a part + // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root + // or start or whatever) and prepend ^ or / at the Regexp construction. + toRegExpSource(allowDot) { + const dot = allowDot ?? !!this.#options.dot; + if (this.#root === this) + this.#fillNegs(); + if (!this.type) { + const noEmpty = this.isStart() && this.isEnd(); + const src = this.#parts + .map(p => { + const [re, _, hasMagic, uflag] = typeof p === 'string' + ? AST.#parseGlob(p, this.#hasMagic, noEmpty) + : p.toRegExpSource(allowDot); + this.#hasMagic = this.#hasMagic || hasMagic; + this.#uflag = this.#uflag || uflag; + return re; + }) + .join(''); + let start = ''; + if (this.isStart()) { + if (typeof this.#parts[0] === 'string') { + // this is the string that will match the start of the pattern, + // so we need to protect against dots and such. + // '.' and '..' cannot match unless the pattern is that exactly, + // even if it starts with . or dot:true is set. + const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); + if (!dotTravAllowed) { + const aps = addPatternStart; + // check if we have a possibility of matching . or .., + // and prevent that. + const needNoTrav = + // dots are allowed, and the pattern starts with [ or . + (dot && aps.has(src.charAt(0))) || + // the pattern starts with \., and then [ or . + (src.startsWith('\\.') && aps.has(src.charAt(2))) || + // the pattern starts with \.\., and then [ or . + (src.startsWith('\\.\\.') && aps.has(src.charAt(4))); + // no need to prevent dots if it can't match a dot, or if a + // sub-pattern will be preventing it anyway. + const needNoDot = !dot && !allowDot && aps.has(src.charAt(0)); + start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : ''; + } + } + } + // append the "end of path portion" pattern to negation tails + let end = ''; + if (this.isEnd() && + this.#root.#filledNegs && + this.#parent?.type === '!') { + end = '(?:$|\\/)'; + } + const final = start + src + end; + return [ + final, + (0, unescape_js_1.unescape)(src), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + // We need to calculate the body *twice* if it's a repeat pattern + // at the start, once in nodot mode, then again in dot mode, so a + // pattern like *(?) can match 'x.y' + const repeated = this.type === '*' || this.type === '+'; + // some kind of extglob + const start = this.type === '!' ? 
'(?:(?!(?:' : '(?:'; + let body = this.#partsToRegExp(dot); + if (this.isStart() && this.isEnd() && !body && this.type !== '!') { + // invalid extglob, has to at least be *something* present, if it's + // the entire path portion. + const s = this.toString(); + this.#parts = [s]; + this.type = null; + this.#hasMagic = undefined; + return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; + } + // XXX abstract out this map method + let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot + ? '' + : this.#partsToRegExp(true); + if (bodyDotAllowed === body) { + bodyDotAllowed = ''; + } + if (bodyDotAllowed) { + body = `(?:${body})(?:${bodyDotAllowed})*?`; + } + // an empty !() is exactly equivalent to a starNoEmpty + let final = ''; + if (this.type === '!' && this.#emptyExt) { + final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty; + } + else { + const close = this.type === '!' + ? // !() must match something,but !(x) can match '' + '))' + + (this.isStart() && !dot && !allowDot ? startNoDot : '') + + star + + ')' + : this.type === '@' + ? ')' + : this.type === '?' + ? ')?' + : this.type === '+' && bodyDotAllowed + ? ')' + : this.type === '*' && bodyDotAllowed + ? `)?` + : `)${this.type}`; + final = start + body + close; + } + return [ + final, + (0, unescape_js_1.unescape)(body), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + #partsToRegExp(dot) { + return this.#parts + .map(p => { + // extglob ASTs should only contain parent ASTs + /* c8 ignore start */ + if (typeof p === 'string') { + throw new Error('string type in extglob ast??'); + } + /* c8 ignore stop */ + // can ignore hasMagic, because extglobs are already always magic + const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot); + this.#uflag = this.#uflag || uflag; + return re; + }) + .filter(p => !(this.isStart() && this.isEnd()) || !!p) + .join('|'); + } + static #parseGlob(glob, hasMagic, noEmpty = false) { + let escaping = false; + let re = ''; + let uflag = false; + for (let i = 0; i < glob.length; i++) { + const c = glob.charAt(i); + if (escaping) { + escaping = false; + re += (reSpecials.has(c) ? 
'\\' : '') + c; + continue; + } + if (c === '\\') { + if (i === glob.length - 1) { + re += '\\\\'; + } + else { + escaping = true; + } + continue; + } + if (c === '[') { + const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i); + if (consumed) { + re += src; + uflag = uflag || needUflag; + i += consumed - 1; + hasMagic = hasMagic || magic; + continue; + } + } + if (c === '*') { + if (noEmpty && glob === '*') + re += starNoEmpty; + else + re += star; + hasMagic = true; + continue; + } + if (c === '?') { + re += qmark; + hasMagic = true; + continue; + } + re += regExpEscape(c); + } + return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag]; + } +} +exports.AST = AST; +//# sourceMappingURL=ast.js.map + +/***/ }), + +/***/ 1090: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// translate the various posix character classes into unicode properties +// this works across all unicode locales +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.parseClass = void 0; +// { : [, /u flag required, negated] +const posixClasses = { + '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true], + '[:alpha:]': ['\\p{L}\\p{Nl}', true], + '[:ascii:]': ['\\x' + '00-\\x' + '7f', false], + '[:blank:]': ['\\p{Zs}\\t', true], + '[:cntrl:]': ['\\p{Cc}', true], + '[:digit:]': ['\\p{Nd}', true], + '[:graph:]': ['\\p{Z}\\p{C}', true, true], + '[:lower:]': ['\\p{Ll}', true], + '[:print:]': ['\\p{C}', true], + '[:punct:]': ['\\p{P}', true], + '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true], + '[:upper:]': ['\\p{Lu}', true], + '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true], + '[:xdigit:]': ['A-Fa-f0-9', false], +}; +// only need to escape a few things inside of brace expressions +// escapes: [ \ ] - +const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&'); +// escape all regexp magic characters +const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// everything has already been escaped, we just have to join +const rangesToString = (ranges) => ranges.join(''); +// takes a glob string at a posix brace expression, and returns +// an equivalent regular expression source, and boolean indicating +// whether the /u flag needs to be applied, and the number of chars +// consumed to parse the character class. +// This also removes out of order ranges, and returns ($.) if the +// entire class just no good. +const parseClass = (glob, position) => { + const pos = position; + /* c8 ignore start */ + if (glob.charAt(pos) !== '[') { + throw new Error('not in a brace expression'); + } + /* c8 ignore stop */ + const ranges = []; + const negs = []; + let i = pos + 1; + let sawStart = false; + let uflag = false; + let escaping = false; + let negate = false; + let endPos = pos; + let rangeStart = ''; + WHILE: while (i < glob.length) { + const c = glob.charAt(i); + if ((c === '!' 
|| c === '^') && i === pos + 1) { + negate = true; + i++; + continue; + } + if (c === ']' && sawStart && !escaping) { + endPos = i + 1; + break; + } + sawStart = true; + if (c === '\\') { + if (!escaping) { + escaping = true; + i++; + continue; + } + // escaped \ char, fall through and treat like normal char + } + if (c === '[' && !escaping) { + // either a posix class, a collation equivalent, or just a [ + for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { + if (glob.startsWith(cls, i)) { + // invalid, [a-[] is fine, but not [a-[:alpha]] + if (rangeStart) { + return ['$.', false, glob.length - pos, true]; + } + i += cls.length; + if (neg) + negs.push(unip); + else + ranges.push(unip); + uflag = uflag || u; + continue WHILE; + } + } + } + // now it's just a normal character, effectively + escaping = false; + if (rangeStart) { + // throw this range away if it's not valid, but others + // can still match. + if (c > rangeStart) { + ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c)); + } + else if (c === rangeStart) { + ranges.push(braceEscape(c)); + } + rangeStart = ''; + i++; + continue; + } + // now might be the start of a range. + // can be either c-d or c-] or c] or c] at this point + if (glob.startsWith('-]', i + 1)) { + ranges.push(braceEscape(c + '-')); + i += 2; + continue; + } + if (glob.startsWith('-', i + 1)) { + rangeStart = c; + i += 2; + continue; + } + // not the start of a range, just a single character + ranges.push(braceEscape(c)); + i++; + } + if (endPos < i) { + // didn't see the end of the class, not a valid class, + // but might still be valid as a literal match. + return ['', false, 0, false]; + } + // if we got no ranges and no negates, then we have a range that + // cannot possibly match anything, and that poisons the whole glob + if (!ranges.length && !negs.length) { + return ['$.', false, glob.length - pos, true]; + } + // if we got one positive range, and it's a single character, then that's + // not actually a magic pattern, it's just that one literal character. + // we should not treat that as "magic", we should just return the literal + // character. [_] is a perfectly valid way to escape glob magic chars. + if (negs.length === 0 && + ranges.length === 1 && + /^\\?.$/.test(ranges[0]) && + !negate) { + const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]; + return [regexpEscape(r), false, endPos - pos, false]; + } + const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']'; + const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']'; + const comb = ranges.length && negs.length + ? '(' + sranges + '|' + snegs + ')' + : ranges.length + ? sranges + : snegs; + return [comb, uflag, endPos - pos, true]; +}; +exports.parseClass = parseClass; +//# sourceMappingURL=brace-expressions.js.map + +/***/ }), + +/***/ 800: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.escape = void 0; +/** + * Escape all magic characters in a glob pattern. + * + * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} + * option is used, then characters are escaped by wrapping in `[]`, because + * a magic character wrapped in a character class can only be satisfied by + * that exact character. In this mode, `\` is _not_ escaped, because it is + * not interpreted as a magic character, but instead as a path separator. + */ +const escape = (s, { windowsPathsNoEscape = false, } = {}) => { + // don't need to escape +@! 
because we escape the parens + // that make those magic, and escaping ! as [!] isn't valid, + // because [!]] is a valid glob class meaning not ']'. + return windowsPathsNoEscape + ? s.replace(/[?*()[\]]/g, '[$&]') + : s.replace(/[?*()[\]\\]/g, '\\$&'); +}; +exports.escape = escape; +//# sourceMappingURL=escape.js.map + +/***/ }), + +/***/ 6507: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0; +const brace_expansion_1 = __importDefault(__nccwpck_require__(4691)); +const assert_valid_pattern_js_1 = __nccwpck_require__(7305); +const ast_js_1 = __nccwpck_require__(1803); +const escape_js_1 = __nccwpck_require__(800); +const unescape_js_1 = __nccwpck_require__(851); +const minimatch = (p, pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false; + } + return new Minimatch(pattern, options).match(p); +}; +exports.minimatch = minimatch; +// Optimized checking for the most common glob patterns. +const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/; +const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext); +const starDotExtTestDot = (ext) => (f) => f.endsWith(ext); +const starDotExtTestNocase = (ext) => { + ext = ext.toLowerCase(); + return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext); +}; +const starDotExtTestNocaseDot = (ext) => { + ext = ext.toLowerCase(); + return (f) => f.toLowerCase().endsWith(ext); +}; +const starDotStarRE = /^\*+\.\*+$/; +const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.'); +const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.'); +const dotStarRE = /^\.\*+$/; +const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.'); +const starRE = /^\*+$/; +const starTest = (f) => f.length !== 0 && !f.startsWith('.'); +const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..'; +const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/; +const qmarksTestNocase = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestNocaseDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTest = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTestNoExt = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && !f.startsWith('.'); +}; +const qmarksTestNoExtDot = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && f !== '.' && f !== '..'; +}; +/* c8 ignore start */ +const defaultPlatform = (typeof process === 'object' && process + ? 
(typeof process.env === 'object' && + process.env && + process.env.__MINIMATCH_TESTING_PLATFORM__) || + process.platform + : 'posix'); +const path = { + win32: { sep: '\\' }, + posix: { sep: '/' }, +}; +/* c8 ignore stop */ +exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep; +exports.minimatch.sep = exports.sep; +exports.GLOBSTAR = Symbol('globstar **'); +exports.minimatch.GLOBSTAR = exports.GLOBSTAR; +// any single thing other than / +// don't need to escape / when using new RegExp() +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?'; +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?'; +const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options); +exports.filter = filter; +exports.minimatch.filter = exports.filter; +const ext = (a, b = {}) => Object.assign({}, a, b); +const defaults = (def) => { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return exports.minimatch; + } + const orig = exports.minimatch; + const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options)); + return Object.assign(m, { + Minimatch: class Minimatch extends orig.Minimatch { + constructor(pattern, options = {}) { + super(pattern, ext(def, options)); + } + static defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + } + }, + AST: class AST extends orig.AST { + /* c8 ignore start */ + constructor(type, parent, options = {}) { + super(type, parent, ext(def, options)); + } + /* c8 ignore stop */ + static fromGlob(pattern, options = {}) { + return orig.AST.fromGlob(pattern, ext(def, options)); + } + }, + unescape: (s, options = {}) => orig.unescape(s, ext(def, options)), + escape: (s, options = {}) => orig.escape(s, ext(def, options)), + filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)), + defaults: (options) => orig.defaults(ext(def, options)), + makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)), + braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)), + match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)), + sep: orig.sep, + GLOBSTAR: exports.GLOBSTAR, + }); +}; +exports.defaults = defaults; +exports.minimatch.defaults = exports.defaults; +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +const braceExpand = (pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern]; + } + return (0, brace_expansion_1.default)(pattern); +}; +exports.braceExpand = braceExpand; +exports.minimatch.braceExpand = exports.braceExpand; +// parse a component of the expanded set. 
+// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe(); +exports.makeRe = makeRe; +exports.minimatch.makeRe = exports.makeRe; +const match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options); + list = list.filter(f => mm.match(f)); + if (mm.options.nonull && !list.length) { + list.push(pattern); + } + return list; +}; +exports.match = match; +exports.minimatch.match = exports.match; +// replace stuff like \* with * +const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/; +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +class Minimatch { + options; + set; + pattern; + windowsPathsNoEscape; + nonegate; + negate; + comment; + empty; + preserveMultipleSlashes; + partial; + globSet; + globParts; + nocase; + isWindows; + platform; + windowsNoMagicRoot; + regexp; + constructor(pattern, options = {}) { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + options = options || {}; + this.options = options; + this.pattern = pattern; + this.platform = options.platform || defaultPlatform; + this.isWindows = this.platform === 'win32'; + this.windowsPathsNoEscape = + !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, '/'); + } + this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; + this.regexp = null; + this.negate = false; + this.nonegate = !!options.nonegate; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.nocase = !!this.options.nocase; + this.windowsNoMagicRoot = + options.windowsNoMagicRoot !== undefined + ? options.windowsNoMagicRoot + : !!(this.isWindows && this.nocase); + this.globSet = []; + this.globParts = []; + this.set = []; + // make the set of regexps etc. + this.make(); + } + hasMagic() { + if (this.options.magicalBraces && this.set.length > 1) { + return true; + } + for (const pattern of this.set) { + for (const part of pattern) { + if (typeof part !== 'string') + return true; + } + } + return false; + } + debug(..._) { } + make() { + const pattern = this.pattern; + const options = this.options; + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true; + return; + } + if (!pattern) { + this.empty = true; + return; + } + // step 1: figure out negation, etc. + this.parseNegate(); + // step 2: expand braces + this.globSet = [...new Set(this.braceExpand())]; + if (options.debug) { + this.debug = (...args) => console.error(...args); + } + this.debug(this.pattern, this.globSet); + // step 3: now we have a set, so turn each one into a series of + // path-portion matching patterns. 
+ // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + // + // First, we preprocess to make the glob pattern sets a bit simpler + // and deduped. There are some perf-killing patterns that can cause + // problems with a glob walk, but we can simplify them down a bit. + const rawGlobParts = this.globSet.map(s => this.slashSplit(s)); + this.globParts = this.preprocess(rawGlobParts); + this.debug(this.pattern, this.globParts); + // glob --> regexps + let set = this.globParts.map((s, _, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + // check if it's a drive or unc path. + const isUNC = s[0] === '' && + s[1] === '' && + (s[2] === '?' || !globMagic.test(s[2])) && + !globMagic.test(s[3]); + const isDrive = /^[a-z]:/i.test(s[0]); + if (isUNC) { + return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))]; + } + else if (isDrive) { + return [s[0], ...s.slice(1).map(ss => this.parse(ss))]; + } + } + return s.map(ss => this.parse(ss)); + }); + this.debug(this.pattern, set); + // filter out everything that didn't compile properly. + this.set = set.filter(s => s.indexOf(false) === -1); + // do not treat the ? in UNC paths as magic + if (this.isWindows) { + for (let i = 0; i < this.set.length; i++) { + const p = this.set[i]; + if (p[0] === '' && + p[1] === '' && + this.globParts[i][2] === '?' && + typeof p[3] === 'string' && + /^[a-z]:$/i.test(p[3])) { + p[2] = '?'; + } + } + } + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + // if we're not in globstar mode, then turn all ** into * + if (this.options.noglobstar) { + for (let i = 0; i < globParts.length; i++) { + for (let j = 0; j < globParts[i].length; j++) { + if (globParts[i][j] === '**') { + globParts[i][j] = '*'; + } + } + } + } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + // aggressive optimization for the purpose of fs walking + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } + else if (optimizationLevel >= 1) { + // just basic optimizations to remove some .. parts + globParts = this.levelOneOptimize(globParts); + } + else { + // just collapse multiple ** portions into one + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; + } + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map(parts => { + let gs = -1; + while (-1 !== (gs = parts.indexOf('**', gs + 1))) { + let i = gs; + while (parts[i + 1] === '**') { + i++; + } + if (i !== gs) { + parts.splice(gs, i - gs); + } + } + return parts; + }); + } + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map(parts => { + parts = parts.reduce((set, part) => { + const prev = set[set.length - 1]; + if (part === '**' && prev === '**') { + return set; + } + if (part === '..') { + if (prev && prev !== '..' && prev !== '.' && prev !== '**') { + set.pop(); + return set; + } + } + set.push(part); + return set; + }, []); + return parts.length === 0 ? 
[''] : parts; + }); + } + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); + } + let didSomething = false; + do { + didSomething = false; + //

<pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
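The path-segment cleanup above is easier to see from the outside. A minimal sketch, assuming the published minimatch package behaves like this bundled copy and that the non-default optimizationLevel: 2 option (read from the matchOne code further below) is what enables this file-side optimization:

const { minimatch } = require('minimatch');

// Empty and '.' segments in the *file* are squeezed out before matching.
console.log(minimatch('a//b/./c', 'a/b/c', { optimizationLevel: 2 })); // expected: true

// 'y/..' in the file resolves away, so it can still match 'x/z'.
console.log(minimatch('x/y/../z', 'x/z', { optimizationLevel: 2 })); // expected: true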
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
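The net effect of the '**' handling described in the comments above can be checked through the public API. A small sketch, assuming the published minimatch package; the expected values follow from the globstar semantics documented here (zero or more segments, never dot segments unless dot: true):

const { minimatch } = require('minimatch');

console.log(minimatch('a/b/c', 'a/**/c'));                 // expected: true  ('**' spans one segment)
console.log(minimatch('a/c', 'a/**/c'));                   // expected: true  ('**' can match zero segments)
console.log(minimatch('a/.x/c', 'a/**/c'));                // expected: false ('**' skips dot segments by default)
console.log(minimatch('a/.x/c', 'a/**/c', { dot: true })); // expected: true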
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
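To make the dedupe rule above concrete: after brace expansion, a pattern such as 'a/{*/,b/}x' yields both 'a/*/x' and 'a/b/x', and per the rule the second should collapse into the first at optimizationLevel: 2, since 'a/*/x' already covers it. A hedged sketch (the dedupe is internal, so matching results are the same either way), assuming the published minimatch package:

const { minimatch, braceExpand } = require('minimatch');

console.log(braceExpand('a/{*/,b/}x'));                                  // expected: [ 'a/*/x', 'a/b/x' ]
console.log(minimatch('a/b/x', 'a/{*/,b/}x', { optimizationLevel: 2 })); // expected: true
console.log(minimatch('a/c/x', 'a/{*/,b/}x', { optimizationLevel: 2 })); // expected: true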
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
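A brief sketch of the negation handling parsed above, assuming the published minimatch package: a leading '!' inverts the whole pattern, a doubled '!!' cancels out, and the nonegate option treats '!' literally.

const { minimatch } = require('minimatch');

console.log(minimatch('src/app.js', '!*.md'));                          // expected: true  (does not match *.md)
console.log(minimatch('README.md', '!*.md'));                           // expected: false
console.log(minimatch('README.md', '!!*.md'));                          // expected: true  (double negation)
console.log(minimatch('!important', '!important', { nonegate: true })); // expected: true  (literal '!')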
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
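The partial mode described in the comment before matchOne ("/a/b" matching the start of "/*/b/*/d") can be exercised directly. A minimal sketch, assuming the published minimatch package and that Minimatch#match honors the constructor's partial option:

const { Minimatch } = require('minimatch');

const mm = new Minimatch('/*/b/*/d', { partial: true });
console.log(mm.match('/a/b')); // expected: true  (ran out of file, but every part so far matched)
console.log(mm.match('/a/c')); // expected: false (the second segment already fails)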
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
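A short sketch of the brace expansion behavior documented earlier in this module ('a{b,c}d -> abd acd', invalid sets left alone), assuming the published minimatch package:

const { braceExpand } = require('minimatch');

console.log(braceExpand('a{b,c}d'));  // expected: [ 'abd', 'acd' ]
console.log(braceExpand('a{0..2}d')); // expected: [ 'a0d', 'a1d', 'a2d' ]
console.log(braceExpand('a{2..}b'));  // expected: [ 'a{2..}b' ] (invalid set, not expanded)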
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
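+    // Rough sketch of the fast-path dispatch above. The concrete regexes
+    // (starRE, starDotExtRE, qmarksRE, ...) are defined earlier in this
+    // bundle, so the pattern strings below are assumed examples of each shape:
+    //   '*'     -> starTest / starTestDot
+    //   '*.txt' -> a starDotExt test, parameterized by the '.txt' suffix
+    //   '???'   -> a qmarks test
+    //   '*.*'   -> starDotStarTest / starDotStarTestDot
+    //   '.*'    -> dotStarTest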
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
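+    // Illustrative makeRe() sketch (the Minimatch class is the one exported
+    // from this module):
+    //   const re = new Minimatch('src/**/*.js').makeRe();
+    //   // `re` is a RegExp anchored to the whole path, or `false` when the
+    //   // parsed set is empty.
+    //   if (re) re.test('src/lib/util.js');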
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = __nccwpck_require__(1803);
+Object.defineProperty(exports, "AST", ({ enumerable: true, get: function () { return ast_js_2.AST; } }));
+var escape_js_2 = __nccwpck_require__(800);
+Object.defineProperty(exports, "escape", ({ enumerable: true, get: function () { return escape_js_2.escape; } }));
+var unescape_js_2 = __nccwpck_require__(851);
+Object.defineProperty(exports, "unescape", ({ enumerable: true, get: function () { return unescape_js_2.unescape; } }));
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 851:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
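+// Illustrative sketch of the documented behavior (results assume the default
+// and windowsPathsNoEscape code paths above):
+//   unescape('[*]')                                  // => '*'
+//   unescape('\\*')                                  // => '*'
+//   unescape('[*]', { windowsPathsNoEscape: true })  // => '*'
+//   unescape('\\*', { windowsPathsNoEscape: true })  // => '\\*' (unchanged)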
+//# sourceMappingURL=unescape.js.map
+
+/***/ }),
+
+/***/ 8275:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+const node_events_1 = __nccwpck_require__(8474);
+const node_stream_1 = __importDefault(__nccwpck_require__(7075));
+const node_string_decoder_1 = __nccwpck_require__(6193);
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof node_stream_1.default ||
+        (0, exports.isReadable)(s) ||
+        (0, exports.isWritable)(s));
+exports.isStream = isStream;
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof node_events_1.EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== node_stream_1.default.Writable.prototype.pipe;
+exports.isReadable = isReadable;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof node_events_1.EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+exports.isWritable = isWritable;
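+// Quick illustrative checks (hedged; `Minipass` is the class defined below):
+//   isStream(new Minipass())    // => true, it is a Minipass instance
+//   isStream(process.stdout)    // => true, node core streams qualify
+//   isStream('nope')            // => false, not an object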
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+class Minipass extends node_events_1.EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new node_string_decoder_1.StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
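+    // Construction sketch for the options handled above (illustrative):
+    //   new Minipass()                         // Buffer stream
+    //   new Minipass({ encoding: 'utf8' })     // string-emitting stream
+    //   new Minipass({ objectMode: true })     // arbitrary JS values
+    //   new Minipass({ objectMode: true, encoding: 'utf8' })  // throws TypeError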
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior.  Ie, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
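+    // write() sketch (illustrative): strings default to 'utf8', and
+    // ArrayBuffers / typed array views are converted to Buffers as above.
+    //   const mp = new Minipass({ encoding: 'utf8' });
+    //   mp.write('hello ');                           // returns the flowing state
+    //   mp.write(new TextEncoder().encode('world'));  // Uint8Array -> Buffer -> string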
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
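+    // read() sketch (illustrative), matching the doc comment above:
+    //   const mp = new Minipass({ encoding: 'utf8' });
+    //   mp.write('abcdef');
+    //   mp.read(2);    // => 'ab'
+    //   mp.read(100);  // => null, more than is currently buffered
+    //   mp.read();     // => 'cdef', the rest of the buffer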
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
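+    // pipe() sketch (illustrative; `someWritable` is a placeholder for any
+    // writable destination):
+    //   mp.pipe(process.stdout);                       // `end` is not forwarded here
+    //   mp.pipe(someWritable, { proxyErrors: true });  // also forwards 'error' events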
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
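+    // Listener sketch (illustrative) for the behaviors listed above:
+    //   const mp = new Minipass({ encoding: 'utf8' });
+    //   mp.write('hi');
+    //   mp.end();
+    //   mp.on('data', c => console.log(c));       // adding 'data' starts the flow: logs 'hi'
+    //   mp.on('end', () => console.log('done'));  // 'end' already passed, fires immediately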
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are the only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
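+    // Convenience sketch (illustrative, inside an async function):
+    //   const mp = new Minipass({ encoding: 'utf8' });
+    //   mp.end('hello');
+    //   await mp.concat();   // => 'hello' (collect() would yield ['hello'])
+    //   await mp.promise();  // resolves once 'end' has been emitted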
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
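+    // Async iteration sketch (illustrative, inside an async function):
+    //   const mp = new Minipass({ encoding: 'utf8' });
+    //   mp.end('one');
+    //   for await (const chunk of mp) console.log(chunk);  // logs 'one', then completes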
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return exports.isStream;
+    }
+}
+exports.Minipass = Minipass;
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 6577:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.PathScurry = exports.Path = exports.PathScurryDarwin = exports.PathScurryPosix = exports.PathScurryWin32 = exports.PathScurryBase = exports.PathPosix = exports.PathWin32 = exports.PathBase = exports.ChildrenCache = exports.ResolveCache = void 0;
+const lru_cache_1 = __nccwpck_require__(2477);
+const node_path_1 = __nccwpck_require__(6760);
+const node_url_1 = __nccwpck_require__(3136);
+const fs_1 = __nccwpck_require__(9896);
+const actualFS = __importStar(__nccwpck_require__(3024));
+const realpathSync = fs_1.realpathSync.native;
+// TODO: test perf of fs/promises realpath vs realpathCB,
+// since the promises one uses realpath.native
+const promises_1 = __nccwpck_require__(1455);
+const minipass_1 = __nccwpck_require__(8275);
+const defaultFS = {
+    lstatSync: fs_1.lstatSync,
+    readdir: fs_1.readdir,
+    readdirSync: fs_1.readdirSync,
+    readlinkSync: fs_1.readlinkSync,
+    realpathSync,
+    promises: {
+        lstat: promises_1.lstat,
+        readdir: promises_1.readdir,
+        readlink: promises_1.readlink,
+        realpath: promises_1.realpath,
+    },
+};
+// if they just gave us require('fs') then use our default
+const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
+    defaultFS
+    : {
+        ...defaultFS,
+        ...fsOption,
+        promises: {
+            ...defaultFS.promises,
+            ...(fsOption.promises || {}),
+        },
+    };
+// turn something like //?/c:/ into c:\
+const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
+const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
+// windows paths are separated by either / or \
+const eitherSep = /[\\\/]/;
+const UNKNOWN = 0; // may not even exist, for all we know
+const IFIFO = 0b0001;
+const IFCHR = 0b0010;
+const IFDIR = 0b0100;
+const IFBLK = 0b0110;
+const IFREG = 0b1000;
+const IFLNK = 0b1010;
+const IFSOCK = 0b1100;
+const IFMT = 0b1111;
+// mask to unset low 4 bits
+const IFMT_UNKNOWN = ~IFMT;
+// set after successfully calling readdir() and getting entries.
+const READDIR_CALLED = 0b0000_0001_0000;
+// set after a successful lstat()
+const LSTAT_CALLED = 0b0000_0010_0000;
+// set if an entry (or one of its parents) is definitely not a dir
+const ENOTDIR = 0b0000_0100_0000;
+// set if an entry (or one of its parents) does not exist
+// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
+const ENOENT = 0b0000_1000_0000;
+// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
+// set if we fail to readlink
+const ENOREADLINK = 0b0001_0000_0000;
+// set if we know realpath() will fail
+const ENOREALPATH = 0b0010_0000_0000;
+const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
+const TYPEMASK = 0b0011_1111_1111;
+const entToType = (s) => s.isFile() ? IFREG
+    : s.isDirectory() ? IFDIR
+        : s.isSymbolicLink() ? IFLNK
+            : s.isCharacterDevice() ? IFCHR
+                : s.isBlockDevice() ? IFBLK
+                    : s.isSocket() ? IFSOCK
+                        : s.isFIFO() ? IFIFO
+                            : UNKNOWN;
+// normalize unicode path names
+const normalizeCache = new Map();
+const normalize = (s) => {
+    const c = normalizeCache.get(s);
+    if (c)
+        return c;
+    const n = s.normalize('NFKD');
+    normalizeCache.set(s, n);
+    return n;
+};
+const normalizeNocaseCache = new Map();
+const normalizeNocase = (s) => {
+    const c = normalizeNocaseCache.get(s);
+    if (c)
+        return c;
+    const n = normalize(s.toLowerCase());
+    normalizeNocaseCache.set(s, n);
+    return n;
+};
+/**
+ * An LRUCache for storing resolved path strings or Path objects.
+ * @internal
+ */
+class ResolveCache extends lru_cache_1.LRUCache {
+    constructor() {
+        super({ max: 256 });
+    }
+}
+exports.ResolveCache = ResolveCache;
+// In order to prevent blowing out the js heap by allocating hundreds of
+// thousands of Path entries when walking extremely large trees, the "children"
+// in this tree are represented by storing an array of Path entries in an
+// LRUCache, indexed by the parent.  At any time, Path.children() may return an
+// empty array, indicating that it doesn't know about any of its children, and
+// thus has to rebuild that cache.  This is fine, it just means that we don't
+// benefit as much from having the cached entries, but huge directory walks
+// don't blow out the stack, and smaller ones are still as fast as possible.
+//
+// It does impose some complexity when building up the readdir data, because we
+// need to pass a reference to the children array that we started with.
+/**
+ * an LRUCache for storing child entries.
+ * @internal
+ */
+class ChildrenCache extends lru_cache_1.LRUCache {
+    constructor(maxSize = 16 * 1024) {
+        super({
+            maxSize,
+            // parent + children
+            sizeCalculation: a => a.length + 1,
+        });
+    }
+}
+exports.ChildrenCache = ChildrenCache;
+const setAsCwd = Symbol('PathScurry setAsCwd');
+/**
+ * Path objects are sort of like a super-powered
+ * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
+ *
+ * Each one represents a single filesystem entry on disk, which may or may not
+ * exist. It includes methods for reading various types of information via
+ * lstat, readlink, and readdir, and caches all information to the greatest
+ * degree possible.
+ *
+ * Note that fs operations that would normally throw will instead return an
+ * "empty" value. This is in order to prevent excessive overhead from error
+ * stack traces.
+ */
+class PathBase {
+    /**
+     * the basename of this path
+     *
+     * **Important**: *always* test the path name against any test string
+     * using the {@link isNamed} method, and not by directly comparing this
+     * string. Otherwise, unicode path strings that the system sees as identical
+     * will not be properly treated as the same path, leading to incorrect
+     * behavior and possible security issues.
+     */
+    name;
+    /**
+     * the Path entry corresponding to the path root.
+     *
+     * @internal
+     */
+    root;
+    /**
+     * All roots found within the current PathScurry family
+     *
+     * @internal
+     */
+    roots;
+    /**
+     * a reference to the parent path, or undefined in the case of root entries
+     *
+     * @internal
+     */
+    parent;
+    /**
+     * boolean indicating whether paths are compared case-insensitively
+     * @internal
+     */
+    nocase;
+    /**
+     * boolean indicating that this path is the current working directory
+     * of the PathScurry collection that contains it.
+     */
+    isCWD = false;
+    // potential default fs override
+    #fs;
+    // Stats fields
+    #dev;
+    get dev() {
+        return this.#dev;
+    }
+    #mode;
+    get mode() {
+        return this.#mode;
+    }
+    #nlink;
+    get nlink() {
+        return this.#nlink;
+    }
+    #uid;
+    get uid() {
+        return this.#uid;
+    }
+    #gid;
+    get gid() {
+        return this.#gid;
+    }
+    #rdev;
+    get rdev() {
+        return this.#rdev;
+    }
+    #blksize;
+    get blksize() {
+        return this.#blksize;
+    }
+    #ino;
+    get ino() {
+        return this.#ino;
+    }
+    #size;
+    get size() {
+        return this.#size;
+    }
+    #blocks;
+    get blocks() {
+        return this.#blocks;
+    }
+    #atimeMs;
+    get atimeMs() {
+        return this.#atimeMs;
+    }
+    #mtimeMs;
+    get mtimeMs() {
+        return this.#mtimeMs;
+    }
+    #ctimeMs;
+    get ctimeMs() {
+        return this.#ctimeMs;
+    }
+    #birthtimeMs;
+    get birthtimeMs() {
+        return this.#birthtimeMs;
+    }
+    #atime;
+    get atime() {
+        return this.#atime;
+    }
+    #mtime;
+    get mtime() {
+        return this.#mtime;
+    }
+    #ctime;
+    get ctime() {
+        return this.#ctime;
+    }
+    #birthtime;
+    get birthtime() {
+        return this.#birthtime;
+    }
+    #matchName;
+    #depth;
+    #fullpath;
+    #fullpathPosix;
+    #relative;
+    #relativePosix;
+    #type;
+    #children;
+    #linkTarget;
+    #realpath;
+    /**
+     * This property is for compatibility with the Dirent class as of
+     * Node v20, where Dirent['parentPath'] refers to the path of the
+     * directory that was passed to readdir. For root entries, it's the path
+     * to the entry itself.
+     */
+    get parentPath() {
+        return (this.parent || this).fullpath();
+    }
+    /**
+     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
+     * this property refers to the *parent* path, not the path object itself.
+     *
+     * @deprecated
+     */
+    get path() {
+        return this.parentPath;
+    }
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        this.name = name;
+        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
+        this.#type = type & TYPEMASK;
+        this.nocase = nocase;
+        this.roots = roots;
+        this.root = root || this;
+        this.#children = children;
+        this.#fullpath = opts.fullpath;
+        this.#relative = opts.relative;
+        this.#relativePosix = opts.relativePosix;
+        this.parent = opts.parent;
+        if (this.parent) {
+            this.#fs = this.parent.#fs;
+        }
+        else {
+            this.#fs = fsFromOption(opts.fs);
+        }
+    }
+    /**
+     * Returns the depth of the Path object from its root.
+     *
+     * For example, a path at `/foo/bar` would have a depth of 2.
+     */
+    depth() {
+        if (this.#depth !== undefined)
+            return this.#depth;
+        if (!this.parent)
+            return (this.#depth = 0);
+        return (this.#depth = this.parent.depth() + 1);
+    }
+    /**
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Get the Path object referenced by the string path, resolved from this Path
+     */
+    resolve(path) {
+        if (!path) {
+            return this;
+        }
+        const rootPath = this.getRootString(path);
+        const dir = path.substring(rootPath.length);
+        const dirParts = dir.split(this.splitSep);
+        const result = rootPath ?
+            this.getRoot(rootPath).#resolveParts(dirParts)
+            : this.#resolveParts(dirParts);
+        return result;
+    }
+    #resolveParts(dirParts) {
+        let p = this;
+        for (const part of dirParts) {
+            p = p.child(part);
+        }
+        return p;
+    }
+    /**
+     * Returns the cached children Path objects, if still available.  If they
+     * have fallen out of the cache, then returns an empty array, and resets the
+     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
+     * lookup.
+     *
+     * @internal
+     */
+    children() {
+        const cached = this.#children.get(this);
+        if (cached) {
+            return cached;
+        }
+        const children = Object.assign([], { provisional: 0 });
+        this.#children.set(this, children);
+        this.#type &= ~READDIR_CALLED;
+        return children;
+    }
+    /**
+     * Resolves a path portion and returns or creates the child Path.
+     *
+     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
+     * `'..'`.
+     *
+     * This should not be called directly.  If `pathPart` contains any path
+     * separators, it will lead to unsafe undefined behavior.
+     *
+     * Use `Path.resolve()` instead.
+     *
+     * @internal
+     */
+    child(pathPart, opts) {
+        if (pathPart === '' || pathPart === '.') {
+            return this;
+        }
+        if (pathPart === '..') {
+            return this.parent || this;
+        }
+        // find the child
+        const children = this.children();
+        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
+        for (const p of children) {
+            if (p.#matchName === name) {
+                return p;
+            }
+        }
+        // didn't find it, create provisional child, since it might not
+        // actually exist.  If we know the parent isn't a dir, then
+        // in fact it CAN'T exist.
+        const s = this.parent ? this.sep : '';
+        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
+        const pchild = this.newChild(pathPart, UNKNOWN, {
+            ...opts,
+            parent: this,
+            fullpath,
+        });
+        if (!this.canReaddir()) {
+            pchild.#type |= ENOENT;
+        }
+        // don't have to update provisional, because if we have real children,
+        // then provisional is set to children.length, otherwise a lower number
+        children.push(pchild);
+        return pchild;
+    }
+    /**
+     * The relative path from the cwd. If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to fullpath().
+     */
+    relative() {
+        if (this.isCWD)
+            return '';
+        if (this.#relative !== undefined) {
+            return this.#relative;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relative = this.name);
+        }
+        const pv = p.relative();
+        return pv + (!pv || !p.parent ? '' : this.sep) + name;
+    }
+    /**
+     * The relative path from the cwd, using / as the path separator.
+     * If it does not share an ancestor with the cwd, then this ends up being
+     * equivalent to fullpathPosix().
+     * On posix systems, this is identical to relative().
+     */
+    relativePosix() {
+        if (this.sep === '/')
+            return this.relative();
+        if (this.isCWD)
+            return '';
+        if (this.#relativePosix !== undefined)
+            return this.#relativePosix;
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relativePosix = this.fullpathPosix());
+        }
+        const pv = p.relativePosix();
+        return pv + (!pv || !p.parent ? '' : '/') + name;
+    }
+    /**
+     * The fully resolved path string for this Path entry
+     */
+    fullpath() {
+        if (this.#fullpath !== undefined) {
+            return this.#fullpath;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#fullpath = this.name);
+        }
+        const pv = p.fullpath();
+        const fp = pv + (!p.parent ? '' : this.sep) + name;
+        return (this.#fullpath = fp);
+    }
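+    // Usage sketch (illustrative comments only; nothing here executes),
+    // contrasting relative() and fullpath(). Assumes `scurry` is a PathScurry
+    // instance and posix separators:
+    //
+    //   const e = scurry.cwd.resolve('a/b');
+    //   e.relative();  // 'a/b'
+    //   e.fullpath();  // absolute string: <cwd fullpath> + '/a/b'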
+    /**
+     * On platforms other than windows, this is identical to fullpath.
+     *
+     * On windows, this is overridden to return the forward-slash form of the
+     * full UNC path.
+     */
+    fullpathPosix() {
+        if (this.#fullpathPosix !== undefined)
+            return this.#fullpathPosix;
+        if (this.sep === '/')
+            return (this.#fullpathPosix = this.fullpath());
+        if (!this.parent) {
+            const p = this.fullpath().replace(/\\/g, '/');
+            if (/^[a-z]:\//i.test(p)) {
+                return (this.#fullpathPosix = `//?/${p}`);
+            }
+            else {
+                return (this.#fullpathPosix = p);
+            }
+        }
+        const p = this.parent;
+        const pfpp = p.fullpathPosix();
+        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
+        return (this.#fullpathPosix = fpp);
+    }
+    /**
+     * Is the Path of an unknown type?
+     *
+     * Note that we might know *something* about it if there has been a previous
+     * filesystem operation, for example that it does not exist, or is not a
+     * link, or whether it has child entries.
+     */
+    isUnknown() {
+        return (this.#type & IFMT) === UNKNOWN;
+    }
+    isType(type) {
+        return this[`is${type}`]();
+    }
+    getType() {
+        return (this.isUnknown() ? 'Unknown'
+            : this.isDirectory() ? 'Directory'
+                : this.isFile() ? 'File'
+                    : this.isSymbolicLink() ? 'SymbolicLink'
+                        : this.isFIFO() ? 'FIFO'
+                            : this.isCharacterDevice() ? 'CharacterDevice'
+                                : this.isBlockDevice() ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
+                                        : 'Unknown');
+        /* c8 ignore stop */
+    }
+    /**
+     * Is the Path a regular file?
+     */
+    isFile() {
+        return (this.#type & IFMT) === IFREG;
+    }
+    /**
+     * Is the Path a directory?
+     */
+    isDirectory() {
+        return (this.#type & IFMT) === IFDIR;
+    }
+    /**
+     * Is the path a character device?
+     */
+    isCharacterDevice() {
+        return (this.#type & IFMT) === IFCHR;
+    }
+    /**
+     * Is the path a block device?
+     */
+    isBlockDevice() {
+        return (this.#type & IFMT) === IFBLK;
+    }
+    /**
+     * Is the path a FIFO pipe?
+     */
+    isFIFO() {
+        return (this.#type & IFMT) === IFIFO;
+    }
+    /**
+     * Is the path a socket?
+     */
+    isSocket() {
+        return (this.#type & IFMT) === IFSOCK;
+    }
+    /**
+     * Is the path a symbolic link?
+     */
+    isSymbolicLink() {
+        return (this.#type & IFLNK) === IFLNK;
+    }
+    /**
+     * Return the entry if it has been subject of a successful lstat, or
+     * undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* simply
+     * mean that we haven't called lstat on it.
+     */
+    lstatCached() {
+        return this.#type & LSTAT_CALLED ? this : undefined;
+    }
+    /**
+     * Return the cached link target if the entry has been the subject of a
+     * successful readlink, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readlink() has been called at some point.
+     */
+    readlinkCached() {
+        return this.#linkTarget;
+    }
+    /**
+     * Returns the cached realpath target if the entry has been the subject
+     * of a successful realpath, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * realpath() has been called at some point.
+     */
+    realpathCached() {
+        return this.#realpath;
+    }
+    /**
+     * Returns the cached child Path entries array if the entry has been the
+     * subject of a successful readdir(), or [] otherwise.
+     *
+     * Does not read the filesystem, so an empty array *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readdir() has been called recently enough to still be valid.
+     */
+    readdirCached() {
+        const children = this.children();
+        return children.slice(0, children.provisional);
+    }
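+    // Usage sketch (illustrative comments only; nothing here executes) for the
+    // cached accessors above. They never touch the filesystem, so they are only
+    // meaningful after lstat()/readdir()/etc. have actually run. Assumes
+    // `entry` is a Path object obtained elsewhere:
+    //
+    //   await entry.lstat();
+    //   entry.lstatCached();    // the entry itself, now carrying its type info
+    //   entry.readdirCached();  // [] until readdir() has been called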
+    /**
+     * Return true if it's worth trying to readlink, i.e. we don't (yet) have
+     * any indication that readlink will definitely fail.
+     *
+     * Returns false if the path is known to not be a symlink, if a previous
+     * readlink failed, or if the entry does not exist.
+     */
+    canReadlink() {
+        if (this.#linkTarget)
+            return true;
+        if (!this.parent)
+            return false;
+        // cases where it cannot possibly succeed
+        const ifmt = this.#type & IFMT;
+        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
+            this.#type & ENOREADLINK ||
+            this.#type & ENOENT);
+    }
+    /**
+     * Return true if readdir has previously been successfully called on this
+     * path, indicating that cachedReaddir() is likely valid.
+     */
+    calledReaddir() {
+        return !!(this.#type & READDIR_CALLED);
+    }
+    /**
+     * Returns true if the path is known to not exist. That is, a previous lstat
+     * or readdir failed to verify its existence when that would have been
+     * expected, or a parent entry was marked either enoent or enotdir.
+     */
+    isENOENT() {
+        return !!(this.#type & ENOENT);
+    }
+    /**
+     * Return true if the path is a match for the given path name.  This handles
+     * case sensitivity and unicode normalization.
+     *
+     * Note: even on case-sensitive systems, it is **not** safe to test the
+     * equality of the `.name` property to determine whether a given pathname
+     * matches, due to unicode normalization mismatches.
+     *
+     * Always use this method instead of testing the `path.name` property
+     * directly.
+     */
+    isNamed(n) {
+        return !this.nocase ?
+            this.#matchName === normalize(n)
+            : this.#matchName === normalizeNocase(n);
+    }
+    /**
+     * Return the Path object corresponding to the target of a symbolic link.
+     *
+     * If the Path is not a symbolic link, or if the readlink call fails for any
+     * reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     */
+    async readlink() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = await this.#fs.promises.readlink(this.fullpath());
+            const linkTarget = (await this.parent.realpath())?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
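+    // Usage sketch (illustrative comments only; nothing here executes).
+    // readlink() resolves the target against the parent's realpath and caches
+    // the resulting Path object; non-links simply yield undefined. Assumes
+    // `entry` is a Path object:
+    //
+    //   const target = await entry.readlink();
+    //   if (target) console.log(target.fullpath());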
+    /**
+     * Synchronous {@link PathBase.readlink}
+     */
+    readlinkSync() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = this.#fs.readlinkSync(this.fullpath());
+            const linkTarget = this.parent.realpathSync()?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    #readdirSuccess(children) {
+        // succeeded, mark readdir called bit
+        this.#type |= READDIR_CALLED;
+        // mark all remaining provisional children as ENOENT
+        for (let p = children.provisional; p < children.length; p++) {
+            const c = children[p];
+            if (c)
+                c.#markENOENT();
+        }
+    }
+    #markENOENT() {
+        // mark as UNKNOWN and ENOENT
+        if (this.#type & ENOENT)
+            return;
+        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
+        this.#markChildrenENOENT();
+    }
+    #markChildrenENOENT() {
+        // all children are provisional and do not exist
+        const children = this.children();
+        children.provisional = 0;
+        for (const p of children) {
+            p.#markENOENT();
+        }
+    }
+    #markENOREALPATH() {
+        this.#type |= ENOREALPATH;
+        this.#markENOTDIR();
+    }
+    // save the information when we know the entry is not a dir
+    #markENOTDIR() {
+        // entry is not a directory, so any children can't exist.
+        // this *should* be impossible, since any children created
+        // after it's been marked ENOTDIR should be marked ENOENT,
+        // so it won't even get to this point.
+        /* c8 ignore start */
+        if (this.#type & ENOTDIR)
+            return;
+        /* c8 ignore stop */
+        let t = this.#type;
+        // this could happen if we stat a dir, then delete it,
+        // then try to read it or one of its children.
+        if ((t & IFMT) === IFDIR)
+            t &= IFMT_UNKNOWN;
+        this.#type = t | ENOTDIR;
+        this.#markChildrenENOENT();
+    }
+    #readdirFail(code = '') {
+        // markENOTDIR and markENOENT also set provisional=0
+        if (code === 'ENOTDIR' || code === 'EPERM') {
+            this.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            this.#markENOENT();
+        }
+        else {
+            this.children().provisional = 0;
+        }
+    }
+    #lstatFail(code = '') {
+        // Windows just raises ENOENT in this case, disable for win CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR') {
+            // already know it has a parent by this point
+            const p = this.parent;
+            p.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            /* c8 ignore stop */
+            this.#markENOENT();
+        }
+    }
+    #readlinkFail(code = '') {
+        let ter = this.#type;
+        ter |= ENOREADLINK;
+        if (code === 'ENOENT')
+            ter |= ENOENT;
+        // windows gets a weird error when you try to readlink a file
+        if (code === 'EINVAL' || code === 'UNKNOWN') {
+            // exists, but not a symlink, we don't know WHAT it is, so remove
+            // all IFMT bits.
+            ter &= IFMT_UNKNOWN;
+        }
+        this.#type = ter;
+        // windows just gets ENOENT in this case.  We do cover the case,
+        // just disabled because it's impossible on Windows CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR' && this.parent) {
+            this.parent.#markENOTDIR();
+        }
+        /* c8 ignore stop */
+    }
+    #readdirAddChild(e, c) {
+        return (this.#readdirMaybePromoteChild(e, c) ||
+            this.#readdirAddNewChild(e, c));
+    }
+    #readdirAddNewChild(e, c) {
+        // alloc new entry at head, so it's never provisional
+        const type = entToType(e);
+        const child = this.newChild(e.name, type, { parent: this });
+        const ifmt = child.#type & IFMT;
+        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
+            child.#type |= ENOTDIR;
+        }
+        c.unshift(child);
+        c.provisional++;
+        return child;
+    }
+    #readdirMaybePromoteChild(e, c) {
+        for (let p = c.provisional; p < c.length; p++) {
+            const pchild = c[p];
+            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
+            if (name !== pchild.#matchName) {
+                continue;
+            }
+            return this.#readdirPromoteChild(e, pchild, p, c);
+        }
+    }
+    #readdirPromoteChild(e, p, index, c) {
+        const v = p.name;
+        // retain any other flags, but set ifmt from dirent
+        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
+        // case sensitivity fixing when we learn the true name.
+        if (v !== e.name)
+            p.name = e.name;
+        // just advance provisional index (potentially off the list),
+        // otherwise we have to splice/pop it out and re-insert at head
+        if (index !== c.provisional) {
+            if (index === c.length - 1)
+                c.pop();
+            else
+                c.splice(index, 1);
+            c.unshift(p);
+        }
+        c.provisional++;
+        return p;
+    }
+    /**
+     * Call lstat() on this Path, and update all known information that can be
+     * determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
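+    // Usage sketch (illustrative comments only; nothing here executes).
+    // lstat() returns the same Path (now carrying its type) on success and
+    // undefined on failure, so existence checks read naturally. Assumes
+    // `entry` is a Path object:
+    //
+    //   const st = await entry.lstat();
+    //   if (st?.isDirectory()) {
+    //       // safe to readdir() this entry
+    //   }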
+    /**
+     * Synchronous {@link PathBase.lstat}
+     */
+    lstatSync() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    #applyStat(st) {
+        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
+        this.#atime = atime;
+        this.#atimeMs = atimeMs;
+        this.#birthtime = birthtime;
+        this.#birthtimeMs = birthtimeMs;
+        this.#blksize = blksize;
+        this.#blocks = blocks;
+        this.#ctime = ctime;
+        this.#ctimeMs = ctimeMs;
+        this.#dev = dev;
+        this.#gid = gid;
+        this.#ino = ino;
+        this.#mode = mode;
+        this.#mtime = mtime;
+        this.#mtimeMs = mtimeMs;
+        this.#nlink = nlink;
+        this.#rdev = rdev;
+        this.#size = size;
+        this.#uid = uid;
+        const ifmt = entToType(st);
+        // retain any other flags, but set the ifmt
+        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
+        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
+            this.#type |= ENOTDIR;
+        }
+    }
+    #onReaddirCB = [];
+    #readdirCBInFlight = false;
+    #callOnReaddirCB(children) {
+        this.#readdirCBInFlight = false;
+        const cbs = this.#onReaddirCB.slice();
+        this.#onReaddirCB.length = 0;
+        cbs.forEach(cb => cb(null, children));
+    }
+    /**
+     * Standard node-style callback interface to get list of directory entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     *
+     * @param cb The callback called with (er, entries).  Note that the `er`
+     * param is somewhat extraneous, as all readdir() errors are handled and
+     * simply result in an empty set of entries being returned.
+     * @param allowZalgo Boolean indicating that immediately known results should
+     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
+     * zalgo at your peril, the dark pony lord is devious and unforgiving.
+     */
+    readdirCB(cb, allowZalgo = false) {
+        if (!this.canReaddir()) {
+            if (allowZalgo)
+                cb(null, []);
+            else
+                queueMicrotask(() => cb(null, []));
+            return;
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            const c = children.slice(0, children.provisional);
+            if (allowZalgo)
+                cb(null, c);
+            else
+                queueMicrotask(() => cb(null, c));
+            return;
+        }
+        // don't have to worry about zalgo at this point.
+        this.#onReaddirCB.push(cb);
+        if (this.#readdirCBInFlight) {
+            return;
+        }
+        this.#readdirCBInFlight = true;
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
+            if (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            else {
+                // if we didn't get an error, we always get entries.
+                //@ts-ignore
+                for (const e of entries) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            this.#callOnReaddirCB(children.slice(0, children.provisional));
+            return;
+        });
+    }
+    #asyncReaddirInFlight;
+    /**
+     * Return an array of known child entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async readdir() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        if (this.#asyncReaddirInFlight) {
+            await this.#asyncReaddirInFlight;
+        }
+        else {
+            /* c8 ignore start */
+            let resolve = () => { };
+            /* c8 ignore stop */
+            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
+            try {
+                for (const e of await this.#fs.promises.readdir(fullpath, {
+                    withFileTypes: true,
+                })) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            catch (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            this.#asyncReaddirInFlight = undefined;
+            resolve();
+        }
+        return children.slice(0, children.provisional);
+    }
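+    // Usage sketch (illustrative comments only; nothing here executes).
+    // Assumes `dir` is a Path object for a directory; entries are Path
+    // objects, and a second call is served from the children cache:
+    //
+    //   const entries = await dir.readdir();
+    //   for (const e of entries) console.log(e.name, e.isFile());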
+    /**
+     * Synchronous {@link PathBase.readdir}
+     */
+    readdirSync() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        try {
+            for (const e of this.#fs.readdirSync(fullpath, {
+                withFileTypes: true,
+            })) {
+                this.#readdirAddChild(e, children);
+            }
+            this.#readdirSuccess(children);
+        }
+        catch (er) {
+            this.#readdirFail(er.code);
+            children.provisional = 0;
+        }
+        return children.slice(0, children.provisional);
+    }
+    canReaddir() {
+        if (this.#type & ENOCHILD)
+            return false;
+        const ifmt = IFMT & this.#type;
+        // we always set ENOTDIR when setting IFMT, so should be impossible
+        /* c8 ignore start */
+        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
+            return false;
+        }
+        /* c8 ignore stop */
+        return true;
+    }
+    shouldWalk(dirs, walkFilter) {
+        return ((this.#type & IFDIR) === IFDIR &&
+            !(this.#type & ENOCHILD) &&
+            !dirs.has(this) &&
+            (!walkFilter || walkFilter(this)));
+    }
+    /**
+     * Return the Path object corresponding to path as resolved
+     * by realpath(3).
+     *
+     * If the realpath call fails for any reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     * On success, returns a Path object.
+     */
+    async realpath() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = await this.#fs.promises.realpath(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Synchronous {@link realpath}
+     */
+    realpathSync() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = this.#fs.realpathSync(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Internal method to mark this Path object as the scurry cwd,
+     * called by {@link PathScurry#chdir}
+     *
+     * @internal
+     */
+    [setAsCwd](oldCwd) {
+        if (oldCwd === this)
+            return;
+        oldCwd.isCWD = false;
+        this.isCWD = true;
+        const changed = new Set([]);
+        let rp = [];
+        let p = this;
+        while (p && p.parent) {
+            changed.add(p);
+            p.#relative = rp.join(this.sep);
+            p.#relativePosix = rp.join('/');
+            p = p.parent;
+            rp.push('..');
+        }
+        // now un-memoize parents of old cwd
+        p = oldCwd;
+        while (p && p.parent && !changed.has(p)) {
+            p.#relative = undefined;
+            p.#relativePosix = undefined;
+            p = p.parent;
+        }
+    }
+}
+exports.PathBase = PathBase;
+/**
+ * Path class used on win32 systems
+ *
+ * Uses `'\\'` as the path separator for returned paths, and accepts either
+ * `'\\'` or `'/'` as the path separator when parsing paths.
+ */
+class PathWin32 extends PathBase {
+    /**
+     * Separator for generating path strings.
+     */
+    sep = '\\';
+    /**
+     * Separator for parsing path strings.
+     */
+    splitSep = eitherSep;
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return node_path_1.win32.parse(path).root;
+    }
+    /**
+     * @internal
+     */
+    getRoot(rootPath) {
+        rootPath = uncToDrive(rootPath.toUpperCase());
+        if (rootPath === this.root.name) {
+            return this.root;
+        }
+        // ok, not that one, check if it matches another we know about
+        for (const [compare, root] of Object.entries(this.roots)) {
+            if (this.sameRoot(rootPath, compare)) {
+                return (this.roots[rootPath] = root);
+            }
+        }
+        // otherwise, have to create a new one.
+        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
+    }
+    /**
+     * @internal
+     */
+    sameRoot(rootPath, compare = this.root.name) {
+        // windows can (rarely) have case-sensitive filesystem, but
+        // UNC and drive letters are always case-insensitive, and canonically
+        // represented uppercase.
+        rootPath = rootPath
+            .toUpperCase()
+            .replace(/\//g, '\\')
+            .replace(uncDriveRegexp, '$1\\');
+        return rootPath === compare;
+    }
+}
+exports.PathWin32 = PathWin32;
+/**
+ * Path class used on all posix systems.
+ *
+ * Uses `'/'` as the path separator.
+ */
+class PathPosix extends PathBase {
+    /**
+     * separator for parsing path strings
+     */
+    splitSep = '/';
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return path.startsWith('/') ? '/' : '';
+    }
+    /**
+     * @internal
+     */
+    getRoot(_rootPath) {
+        return this.root;
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+}
+exports.PathPosix = PathPosix;
+/**
+ * The base class for all PathScurry classes, providing the interface for path
+ * resolution and filesystem operations.
+ *
+ * Typically, you should *not* instantiate this class directly, but rather one
+ * of the platform-specific classes, or the exported {@link PathScurry} which
+ * defaults to the current platform.
+ */
+class PathScurryBase {
+    /**
+     * The root Path entry for the current working directory of this Scurry
+     */
+    root;
+    /**
+     * The string path for the root of this Scurry's current working directory
+     */
+    rootPath;
+    /**
+     * A collection of all roots encountered, referenced by rootPath
+     */
+    roots;
+    /**
+     * The Path entry corresponding to this PathScurry's current working directory.
+     */
+    cwd;
+    #resolveCache;
+    #resolvePosixCache;
+    #children;
+    /**
+     * Perform path comparisons case-insensitively.
+     *
+     * Defaults to true on Darwin and Windows systems, false elsewhere.
+     */
+    nocase;
+    #fs;
+    /**
+     * This class should not be instantiated directly.
+     *
+     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
+     *
+     * @internal
+     */
+    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
+        this.#fs = fsFromOption(fs);
+        if (cwd instanceof URL || cwd.startsWith('file://')) {
+            cwd = (0, node_url_1.fileURLToPath)(cwd);
+        }
+        // resolve and split root, and then add to the store.
+        // this is the only time we call path.resolve()
+        const cwdPath = pathImpl.resolve(cwd);
+        this.roots = Object.create(null);
+        this.rootPath = this.parseRootPath(cwdPath);
+        this.#resolveCache = new ResolveCache();
+        this.#resolvePosixCache = new ResolveCache();
+        this.#children = new ChildrenCache(childrenCacheSize);
+        const split = cwdPath.substring(this.rootPath.length).split(sep);
+        // resolve('/') leaves '', splits to [''], we don't want that.
+        if (split.length === 1 && !split[0]) {
+            split.pop();
+        }
+        /* c8 ignore start */
+        if (nocase === undefined) {
+            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
+        }
+        /* c8 ignore stop */
+        this.nocase = nocase;
+        this.root = this.newRoot(this.#fs);
+        this.roots[this.rootPath] = this.root;
+        let prev = this.root;
+        let len = split.length - 1;
+        const joinSep = pathImpl.sep;
+        let abs = this.rootPath;
+        let sawFirst = false;
+        for (const part of split) {
+            const l = len--;
+            prev = prev.child(part, {
+                relative: new Array(l).fill('..').join(joinSep),
+                relativePosix: new Array(l).fill('..').join('/'),
+                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
+            });
+            sawFirst = true;
+        }
+        this.cwd = prev;
+    }
+    /**
+     * Get the depth of a provided path, string, or the cwd
+     */
+    depth(path = this.cwd) {
+        if (typeof path === 'string') {
+            path = this.cwd.resolve(path);
+        }
+        return path.depth();
+    }
+    /**
+     * Return the cache of child entries.  Exposed so subclasses can create
+     * child Path objects in a platform-specific way.
+     *
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolve(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolveCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpath();
+        this.#resolveCache.set(r, result);
+        return result;
+    }
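+    // Usage sketch (illustrative comments only; nothing here executes).
+    // Unlike require('path').resolve, repeated calls with the same arguments
+    // are served from the resolve cache. Assumes `scurry` is a PathScurry
+    // instance:
+    //
+    //   scurry.resolve('a', '../b', './c');  // computed via cached Path objects
+    //   scurry.resolve('a', '../b', './c');  // same string, from the cache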
+    /**
+     * Resolve one or more path strings to a resolved string, returning
+     * the posix path.  Identical to .resolve() on posix systems, but on
+     * windows will return a forward-slash separated UNC path.
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolvePosix(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolvePosixCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpathPosix();
+        this.#resolvePosixCache.set(r, result);
+        return result;
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or entry
+     */
+    relative(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relative();
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or
+     * entry, using / as the path delimiter, even on Windows.
+     */
+    relativePosix(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relativePosix();
+    }
+    /**
+     * Return the basename for the provided string or Path object
+     */
+    basename(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.name;
+    }
+    /**
+     * Return the dirname for the provided string or Path object
+     */
+    dirname(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return (entry.parent || entry).fullpath();
+    }
+    async readdir(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else {
+            const p = await entry.readdir();
+            return withFileTypes ? p : p.map(e => e.name);
+        }
+    }
+    readdirSync(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else if (withFileTypes) {
+            return entry.readdirSync();
+        }
+        else {
+            return entry.readdirSync().map(e => e.name);
+        }
+    }
+    /**
+     * Call lstat() on the string or Path object, and update all known
+     * information that can be determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstat();
+    }
+    /**
+     * Synchronous {@link PathScurryBase.lstat}
+     */
+    lstatSync(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstatSync();
+    }
+    async readlink(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.readlink();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    readlinkSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.readlinkSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async realpath(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.realpath();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    realpathSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.realpathSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async walk(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const walk = (dir, cb) => {
+            dirs.add(dir);
+            dir.readdirCB((er, entries) => {
+                /* c8 ignore start */
+                if (er) {
+                    return cb(er);
+                }
+                /* c8 ignore stop */
+                let len = entries.length;
+                if (!len)
+                    return cb();
+                const next = () => {
+                    if (--len === 0) {
+                        cb();
+                    }
+                };
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        results.push(withFileTypes ? e : e.fullpath());
+                    }
+                    if (follow && e.isSymbolicLink()) {
+                        e.realpath()
+                            .then(r => (r?.isUnknown() ? r.lstat() : r))
+                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
+                    }
+                    else {
+                        if (e.shouldWalk(dirs, walkFilter)) {
+                            walk(e, next);
+                        }
+                        else {
+                            next();
+                        }
+                    }
+                }
+            }, true); // zalgooooooo
+        };
+        const start = entry;
+        return new Promise((res, rej) => {
+            walk(start, er => {
+                /* c8 ignore start */
+                if (er)
+                    return rej(er);
+                /* c8 ignore stop */
+                res(results);
+            });
+        });
+    }
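+    // Usage sketch (illustrative comments only; nothing here executes).
+    // walk() collects all results up front; `filter` controls what is
+    // returned, `walkFilter` controls which directories are descended into.
+    // Assumes `scurry` is a PathScurry instance:
+    //
+    //   const files = await scurry.walk('.', {
+    //       withFileTypes: false,                        // return strings
+    //       walkFilter: e => e.name !== 'node_modules',  // don't descend here
+    //   });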
+    walkSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    results.push(withFileTypes ? e : e.fullpath());
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+        return results;
+    }
+    /**
+     * Support for `for await`
+     *
+     * Alias for {@link PathScurryBase.iterate}
+     *
+     * Note: As of Node 19, this is very slow, compared to other methods of
+     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
+     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
+     */
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+    iterate(entry = this.cwd, options = {}) {
+        // iterating async over the stream is significantly more performant,
+        // especially in the warm-cache scenario, because it buffers up directory
+        // entries in the background instead of waiting for a yield for each one.
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            options = entry;
+            entry = this.cwd;
+        }
+        return this.stream(entry, options)[Symbol.asyncIterator]();
+    }
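+    // Usage sketch (illustrative comments only; nothing here executes).
+    // `for await` over the scurry delegates to stream(), which buffers
+    // directory entries in the background. Assumes `scurry` is a PathScurry
+    // instance and an async context:
+    //
+    //   for await (const entry of scurry) {
+    //       if (entry.isFile()) console.log(entry.relative());
+    //   }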
+    /**
+     * Iterating over a PathScurry performs a synchronous walk.
+     *
+     * Alias for {@link PathScurryBase.iterateSync}
+     */
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    *iterateSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        if (!filter || filter(entry)) {
+            yield withFileTypes ? entry : entry.fullpath();
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    yield withFileTypes ? e : e.fullpath();
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+    }
+    stream(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new minipass_1.Minipass({ objectMode: true });
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const onReaddir = (er, entries, didRealpaths = false) => {
+                    /* c8 ignore start */
+                    if (er)
+                        return results.emit('error', er);
+                    /* c8 ignore stop */
+                    if (follow && !didRealpaths) {
+                        const promises = [];
+                        for (const e of entries) {
+                            if (e.isSymbolicLink()) {
+                                promises.push(e
+                                    .realpath()
+                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
+                            }
+                        }
+                        if (promises.length) {
+                            Promise.all(promises).then(() => onReaddir(null, entries, true));
+                            return;
+                        }
+                    }
+                    for (const e of entries) {
+                        if (e && (!filter || filter(e))) {
+                            if (!results.write(withFileTypes ? e : e.fullpath())) {
+                                paused = true;
+                            }
+                        }
+                    }
+                    processing--;
+                    for (const e of entries) {
+                        const r = e.realpathCached() || e;
+                        if (r.shouldWalk(dirs, walkFilter)) {
+                            queue.push(r);
+                        }
+                    }
+                    if (paused && !results.flowing) {
+                        results.once('drain', process);
+                    }
+                    else if (!sync) {
+                        process();
+                    }
+                };
+                // zalgo containment
+                let sync = true;
+                dir.readdirCB(onReaddir, true);
+                sync = false;
+            }
+        };
+        process();
+        return results;
+    }
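+    // Usage sketch (illustrative comments only; nothing here executes).
+    // The returned stream is a Minipass in object mode, so a slow consumer
+    // pauses the walk via backpressure. Assumes `scurry` is a PathScurry
+    // instance:
+    //
+    //   scurry.stream({ withFileTypes: false })
+    //       .on('data', p => console.log(p))
+    //       .on('end', () => console.log('walk complete'));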
+    streamSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new minipass_1.Minipass({ objectMode: true });
+        const dirs = new Set();
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const entries = dir.readdirSync();
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        if (!results.write(withFileTypes ? e : e.fullpath())) {
+                            paused = true;
+                        }
+                    }
+                }
+                processing--;
+                for (const e of entries) {
+                    let r = e;
+                    if (e.isSymbolicLink()) {
+                        if (!(follow && (r = e.realpathSync())))
+                            continue;
+                        if (r.isUnknown())
+                            r.lstatSync();
+                    }
+                    if (r.shouldWalk(dirs, walkFilter)) {
+                        queue.push(r);
+                    }
+                }
+            }
+            if (paused && !results.flowing)
+                results.once('drain', process);
+        };
+        process();
+        return results;
+    }
+    chdir(path = this.cwd) {
+        const oldCwd = this.cwd;
+        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
+        this.cwd[setAsCwd](oldCwd);
+    }
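+    // Usage sketch (illustrative comments only; nothing here executes).
+    // chdir() only changes what relative paths are computed against; it does
+    // not call process.chdir(). Assumes `scurry` is a PathScurry instance and
+    // posix separators:
+    //
+    //   const entry = scurry.cwd.resolve('src/index.ts');
+    //   entry.relative();     // 'src/index.ts'
+    //   scurry.chdir('src');
+    //   entry.relative();     // 'index.ts'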
+}
+exports.PathScurryBase = PathScurryBase;
+/**
+ * Windows implementation of {@link PathScurryBase}
+ *
+ * Defaults to case insensitive, uses `'\\'` to generate path strings.  Uses
+ * {@link PathWin32} for Path objects.
+ */
+class PathScurryWin32 extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '\\';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, node_path_1.win32, '\\', { ...opts, nocase });
+        this.nocase = nocase;
+        for (let p = this.cwd; p; p = p.parent) {
+            p.nocase = this.nocase;
+        }
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(dir) {
+        // If the path starts with a single separator, it's not a UNC path, so
+        // win32.parse() just returns the separator as the root.
+        // In that case, mount \ as the root from the cwd.
+        return node_path_1.win32.parse(dir).root.toUpperCase();
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
+    }
+}
+exports.PathScurryWin32 = PathScurryWin32;
+/**
+ * {@link PathScurryBase} implementation for all posix systems other than Darwin.
+ *
+ * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+class PathScurryPosix extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = false } = opts;
+        super(cwd, node_path_1.posix, '/', { ...opts, nocase });
+        this.nocase = nocase;
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(_dir) {
+        return '/';
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return p.startsWith('/');
+    }
+}
+exports.PathScurryPosix = PathScurryPosix;
+/**
+ * {@link PathScurryBase} implementation for Darwin (macOS) systems.
+ *
+ * Defaults to case-insensitive matching, uses `'/'` for generating path
+ * strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+class PathScurryDarwin extends PathScurryPosix {
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, { ...opts, nocase });
+    }
+}
+exports.PathScurryDarwin = PathScurryDarwin;
+/**
+ * Default {@link PathBase} implementation for the current platform.
+ *
+ * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
+ */
+exports.Path = process.platform === 'win32' ? PathWin32 : PathPosix;
+/**
+ * Default {@link PathScurryBase} implementation for the current platform.
+ *
+ * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
+ * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
+ */
+exports.PathScurry = process.platform === 'win32' ? PathScurryWin32
+    : process.platform === 'darwin' ? PathScurryDarwin
+        : PathScurryPosix;
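+// Usage sketch (illustrative comments only; nothing here executes). The two
+// exports above pick the platform-appropriate implementation; a typical
+// consumer (assuming this bundle is used the way the path-scurry package is)
+// would do something like:
+//
+//   const scurry = new exports.PathScurry(process.cwd());
+//   for (const entry of scurry.readdirSync('.', { withFileTypes: true })) {
+//       console.log(entry.isDirectory() ? 'dir ' : 'file', entry.name);
+//   }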
+//# sourceMappingURL=index.js.map
+
+/***/ }),
+
+/***/ 4456:
+/***/ ((module) => {
+
+"use strict";
+module.exports = /*#__PURE__*/JSON.parse('{"name":"@aws-sdk/client-secrets-manager","description":"AWS SDK for JavaScript Secrets Manager Client for Node.js, Browser and React Native","version":"3.716.0","scripts":{"build":"concurrently \'yarn:build:cjs\' \'yarn:build:es\' \'yarn:build:types\'","build:cjs":"node ../../scripts/compilation/inline client-secrets-manager","build:es":"tsc -p tsconfig.es.json","build:include:deps":"lerna run --scope $npm_package_name --include-dependencies build","build:types":"tsc -p tsconfig.types.json","build:types:downlevel":"downlevel-dts dist-types dist-types/ts3.4","clean":"rimraf ./dist-* && rimraf *.tsbuildinfo","extract:docs":"api-extractor run --local","generate:client":"node ../../scripts/generate-clients/single-service --solo secrets-manager"},"main":"./dist-cjs/index.js","types":"./dist-types/index.d.ts","module":"./dist-es/index.js","sideEffects":false,"dependencies":{"@aws-crypto/sha256-browser":"5.2.0","@aws-crypto/sha256-js":"5.2.0","@aws-sdk/client-sso-oidc":"3.716.0","@aws-sdk/client-sts":"3.716.0","@aws-sdk/core":"3.716.0","@aws-sdk/credential-provider-node":"3.716.0","@aws-sdk/middleware-host-header":"3.714.0","@aws-sdk/middleware-logger":"3.714.0","@aws-sdk/middleware-recursion-detection":"3.714.0","@aws-sdk/middleware-user-agent":"3.716.0","@aws-sdk/region-config-resolver":"3.714.0","@aws-sdk/types":"3.714.0","@aws-sdk/util-endpoints":"3.714.0","@aws-sdk/util-user-agent-browser":"3.714.0","@aws-sdk/util-user-agent-node":"3.716.0","@smithy/config-resolver":"^3.0.13","@smithy/core":"^2.5.5","@smithy/fetch-http-handler":"^4.1.2","@smithy/hash-node":"^3.0.11","@smithy/invalid-dependency":"^3.0.11","@smithy/middleware-content-length":"^3.0.13","@smithy/middleware-endpoint":"^3.2.6","@smithy/middleware-retry":"^3.0.31","@smithy/middleware-serde":"^3.0.11","@smithy/middleware-stack":"^3.0.11","@smithy/node-config-provider":"^3.1.12","@smithy/node-http-handler":"^3.3.2","@smithy/protocol-http":"^4.1.8","@smithy/smithy-client":"^3.5.1","@smithy/types":"^3.7.2","@smithy/url-parser":"^3.0.11","@smithy/util-base64":"^3.0.0","@smithy/util-body-length-browser":"^3.0.0","@smithy/util-body-length-node":"^3.0.0","@smithy/util-defaults-mode-browser":"^3.0.31","@smithy/util-defaults-mode-node":"^3.0.31","@smithy/util-endpoints":"^2.1.7","@smithy/util-middleware":"^3.0.11","@smithy/util-retry":"^3.0.11","@smithy/util-utf8":"^3.0.0","@types/uuid":"^9.0.1","tslib":"^2.6.2","uuid":"^9.0.1"},"devDependencies":{"@tsconfig/node16":"16.1.3","@types/node":"^16.18.96","concurrently":"7.0.0","downlevel-dts":"0.10.1","rimraf":"3.0.2","typescript":"~4.9.5"},"engines":{"node":">=16.0.0"},"typesVersions":{"<4.0":{"dist-types/*":["dist-types/ts3.4/*"]}},"files":["dist-*/**"],"author":{"name":"AWS SDK for JavaScript Team","url":"https://aws.amazon.com/javascript/"},"license":"Apache-2.0","browser":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.browser"},"react-native":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.native"},"homepage":"https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-secrets-manager","repository":{"type":"git","url":"https://github.com/aws/aws-sdk-js-v3.git","directory":"clients/client-secrets-manager"}}');
+
+/***/ }),
+
+/***/ 9412:
+/***/ ((module) => {
+
+"use strict";
+module.exports = /*#__PURE__*/JSON.parse('{"name":"@aws-sdk/client-sso-oidc","description":"AWS SDK for JavaScript Sso Oidc Client for Node.js, Browser and React Native","version":"3.716.0","scripts":{"build":"concurrently \'yarn:build:cjs\' \'yarn:build:es\' \'yarn:build:types\'","build:cjs":"node ../../scripts/compilation/inline client-sso-oidc","build:es":"tsc -p tsconfig.es.json","build:include:deps":"lerna run --scope $npm_package_name --include-dependencies build","build:types":"tsc -p tsconfig.types.json","build:types:downlevel":"downlevel-dts dist-types dist-types/ts3.4","clean":"rimraf ./dist-* && rimraf *.tsbuildinfo","extract:docs":"api-extractor run --local","generate:client":"node ../../scripts/generate-clients/single-service --solo sso-oidc"},"main":"./dist-cjs/index.js","types":"./dist-types/index.d.ts","module":"./dist-es/index.js","sideEffects":false,"dependencies":{"@aws-crypto/sha256-browser":"5.2.0","@aws-crypto/sha256-js":"5.2.0","@aws-sdk/core":"3.716.0","@aws-sdk/credential-provider-node":"3.716.0","@aws-sdk/middleware-host-header":"3.714.0","@aws-sdk/middleware-logger":"3.714.0","@aws-sdk/middleware-recursion-detection":"3.714.0","@aws-sdk/middleware-user-agent":"3.716.0","@aws-sdk/region-config-resolver":"3.714.0","@aws-sdk/types":"3.714.0","@aws-sdk/util-endpoints":"3.714.0","@aws-sdk/util-user-agent-browser":"3.714.0","@aws-sdk/util-user-agent-node":"3.716.0","@smithy/config-resolver":"^3.0.13","@smithy/core":"^2.5.5","@smithy/fetch-http-handler":"^4.1.2","@smithy/hash-node":"^3.0.11","@smithy/invalid-dependency":"^3.0.11","@smithy/middleware-content-length":"^3.0.13","@smithy/middleware-endpoint":"^3.2.6","@smithy/middleware-retry":"^3.0.31","@smithy/middleware-serde":"^3.0.11","@smithy/middleware-stack":"^3.0.11","@smithy/node-config-provider":"^3.1.12","@smithy/node-http-handler":"^3.3.2","@smithy/protocol-http":"^4.1.8","@smithy/smithy-client":"^3.5.1","@smithy/types":"^3.7.2","@smithy/url-parser":"^3.0.11","@smithy/util-base64":"^3.0.0","@smithy/util-body-length-browser":"^3.0.0","@smithy/util-body-length-node":"^3.0.0","@smithy/util-defaults-mode-browser":"^3.0.31","@smithy/util-defaults-mode-node":"^3.0.31","@smithy/util-endpoints":"^2.1.7","@smithy/util-middleware":"^3.0.11","@smithy/util-retry":"^3.0.11","@smithy/util-utf8":"^3.0.0","tslib":"^2.6.2"},"devDependencies":{"@tsconfig/node16":"16.1.3","@types/node":"^16.18.96","concurrently":"7.0.0","downlevel-dts":"0.10.1","rimraf":"3.0.2","typescript":"~4.9.5"},"engines":{"node":">=16.0.0"},"typesVersions":{"<4.0":{"dist-types/*":["dist-types/ts3.4/*"]}},"files":["dist-*/**"],"author":{"name":"AWS SDK for JavaScript Team","url":"https://aws.amazon.com/javascript/"},"license":"Apache-2.0","peerDependencies":{"@aws-sdk/client-sts":"^3.716.0"},"browser":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.browser"},"react-native":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.native"},"homepage":"https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-sso-oidc","repository":{"type":"git","url":"https://github.com/aws/aws-sdk-js-v3.git","directory":"clients/client-sso-oidc"}}');
+
+/***/ }),
+
+/***/ 5188:
+/***/ ((module) => {
+
+"use strict";
+module.exports = /*#__PURE__*/JSON.parse('{"name":"@aws-sdk/client-sso","description":"AWS SDK for JavaScript Sso Client for Node.js, Browser and React Native","version":"3.716.0","scripts":{"build":"concurrently \'yarn:build:cjs\' \'yarn:build:es\' \'yarn:build:types\'","build:cjs":"node ../../scripts/compilation/inline client-sso","build:es":"tsc -p tsconfig.es.json","build:include:deps":"lerna run --scope $npm_package_name --include-dependencies build","build:types":"tsc -p tsconfig.types.json","build:types:downlevel":"downlevel-dts dist-types dist-types/ts3.4","clean":"rimraf ./dist-* && rimraf *.tsbuildinfo","extract:docs":"api-extractor run --local","generate:client":"node ../../scripts/generate-clients/single-service --solo sso"},"main":"./dist-cjs/index.js","types":"./dist-types/index.d.ts","module":"./dist-es/index.js","sideEffects":false,"dependencies":{"@aws-crypto/sha256-browser":"5.2.0","@aws-crypto/sha256-js":"5.2.0","@aws-sdk/core":"3.716.0","@aws-sdk/middleware-host-header":"3.714.0","@aws-sdk/middleware-logger":"3.714.0","@aws-sdk/middleware-recursion-detection":"3.714.0","@aws-sdk/middleware-user-agent":"3.716.0","@aws-sdk/region-config-resolver":"3.714.0","@aws-sdk/types":"3.714.0","@aws-sdk/util-endpoints":"3.714.0","@aws-sdk/util-user-agent-browser":"3.714.0","@aws-sdk/util-user-agent-node":"3.716.0","@smithy/config-resolver":"^3.0.13","@smithy/core":"^2.5.5","@smithy/fetch-http-handler":"^4.1.2","@smithy/hash-node":"^3.0.11","@smithy/invalid-dependency":"^3.0.11","@smithy/middleware-content-length":"^3.0.13","@smithy/middleware-endpoint":"^3.2.6","@smithy/middleware-retry":"^3.0.31","@smithy/middleware-serde":"^3.0.11","@smithy/middleware-stack":"^3.0.11","@smithy/node-config-provider":"^3.1.12","@smithy/node-http-handler":"^3.3.2","@smithy/protocol-http":"^4.1.8","@smithy/smithy-client":"^3.5.1","@smithy/types":"^3.7.2","@smithy/url-parser":"^3.0.11","@smithy/util-base64":"^3.0.0","@smithy/util-body-length-browser":"^3.0.0","@smithy/util-body-length-node":"^3.0.0","@smithy/util-defaults-mode-browser":"^3.0.31","@smithy/util-defaults-mode-node":"^3.0.31","@smithy/util-endpoints":"^2.1.7","@smithy/util-middleware":"^3.0.11","@smithy/util-retry":"^3.0.11","@smithy/util-utf8":"^3.0.0","tslib":"^2.6.2"},"devDependencies":{"@tsconfig/node16":"16.1.3","@types/node":"^16.18.96","concurrently":"7.0.0","downlevel-dts":"0.10.1","rimraf":"3.0.2","typescript":"~4.9.5"},"engines":{"node":">=16.0.0"},"typesVersions":{"<4.0":{"dist-types/*":["dist-types/ts3.4/*"]}},"files":["dist-*/**"],"author":{"name":"AWS SDK for JavaScript Team","url":"https://aws.amazon.com/javascript/"},"license":"Apache-2.0","browser":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.browser"},"react-native":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.native"},"homepage":"https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-sso","repository":{"type":"git","url":"https://github.com/aws/aws-sdk-js-v3.git","directory":"clients/client-sso"}}');
+
+/***/ }),
+
+/***/ 4959:
+/***/ ((module) => {
+
+"use strict";
+module.exports = /*#__PURE__*/JSON.parse('{"name":"@aws-sdk/client-sts","description":"AWS SDK for JavaScript Sts Client for Node.js, Browser and React Native","version":"3.716.0","scripts":{"build":"concurrently \'yarn:build:cjs\' \'yarn:build:es\' \'yarn:build:types\'","build:cjs":"node ../../scripts/compilation/inline client-sts","build:es":"tsc -p tsconfig.es.json","build:include:deps":"lerna run --scope $npm_package_name --include-dependencies build","build:types":"rimraf ./dist-types tsconfig.types.tsbuildinfo && tsc -p tsconfig.types.json","build:types:downlevel":"downlevel-dts dist-types dist-types/ts3.4","clean":"rimraf ./dist-* && rimraf *.tsbuildinfo","extract:docs":"api-extractor run --local","generate:client":"node ../../scripts/generate-clients/single-service --solo sts","test":"yarn g:vitest run","test:watch":"yarn g:vitest watch"},"main":"./dist-cjs/index.js","types":"./dist-types/index.d.ts","module":"./dist-es/index.js","sideEffects":false,"dependencies":{"@aws-crypto/sha256-browser":"5.2.0","@aws-crypto/sha256-js":"5.2.0","@aws-sdk/client-sso-oidc":"3.716.0","@aws-sdk/core":"3.716.0","@aws-sdk/credential-provider-node":"3.716.0","@aws-sdk/middleware-host-header":"3.714.0","@aws-sdk/middleware-logger":"3.714.0","@aws-sdk/middleware-recursion-detection":"3.714.0","@aws-sdk/middleware-user-agent":"3.716.0","@aws-sdk/region-config-resolver":"3.714.0","@aws-sdk/types":"3.714.0","@aws-sdk/util-endpoints":"3.714.0","@aws-sdk/util-user-agent-browser":"3.714.0","@aws-sdk/util-user-agent-node":"3.716.0","@smithy/config-resolver":"^3.0.13","@smithy/core":"^2.5.5","@smithy/fetch-http-handler":"^4.1.2","@smithy/hash-node":"^3.0.11","@smithy/invalid-dependency":"^3.0.11","@smithy/middleware-content-length":"^3.0.13","@smithy/middleware-endpoint":"^3.2.6","@smithy/middleware-retry":"^3.0.31","@smithy/middleware-serde":"^3.0.11","@smithy/middleware-stack":"^3.0.11","@smithy/node-config-provider":"^3.1.12","@smithy/node-http-handler":"^3.3.2","@smithy/protocol-http":"^4.1.8","@smithy/smithy-client":"^3.5.1","@smithy/types":"^3.7.2","@smithy/url-parser":"^3.0.11","@smithy/util-base64":"^3.0.0","@smithy/util-body-length-browser":"^3.0.0","@smithy/util-body-length-node":"^3.0.0","@smithy/util-defaults-mode-browser":"^3.0.31","@smithy/util-defaults-mode-node":"^3.0.31","@smithy/util-endpoints":"^2.1.7","@smithy/util-middleware":"^3.0.11","@smithy/util-retry":"^3.0.11","@smithy/util-utf8":"^3.0.0","tslib":"^2.6.2"},"devDependencies":{"@tsconfig/node16":"16.1.3","@types/node":"^16.18.96","concurrently":"7.0.0","downlevel-dts":"0.10.1","rimraf":"3.0.2","typescript":"~4.9.5"},"engines":{"node":">=16.0.0"},"typesVersions":{"<4.0":{"dist-types/*":["dist-types/ts3.4/*"]}},"files":["dist-*/**"],"author":{"name":"AWS SDK for JavaScript Team","url":"https://aws.amazon.com/javascript/"},"license":"Apache-2.0","browser":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.browser"},"react-native":{"./dist-es/runtimeConfig":"./dist-es/runtimeConfig.native"},"homepage":"https://github.com/aws/aws-sdk-js-v3/tree/main/clients/client-sts","repository":{"type":"git","url":"https://github.com/aws/aws-sdk-js-v3.git","directory":"clients/client-sts"}}');
+
+/***/ })
+
+/******/ 	});
+/************************************************************************/
+/******/ 	// The module cache
+/******/ 	var __webpack_module_cache__ = {};
+/******/ 	
+/******/ 	// The require function
+/******/ 	function __nccwpck_require__(moduleId) {
+/******/ 		// Check if module is in cache
+/******/ 		var cachedModule = __webpack_module_cache__[moduleId];
+/******/ 		if (cachedModule !== undefined) {
+/******/ 			return cachedModule.exports;
+/******/ 		}
+/******/ 		// Create a new module (and put it into the cache)
+/******/ 		var module = __webpack_module_cache__[moduleId] = {
+/******/ 			// no module.id needed
+/******/ 			// no module.loaded needed
+/******/ 			exports: {}
+/******/ 		};
+/******/ 	
+/******/ 		// Execute the module function
+/******/ 		var threw = true;
+/******/ 		try {
+/******/ 			__webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__);
+/******/ 			threw = false;
+/******/ 		} finally {
+/******/ 			if(threw) delete __webpack_module_cache__[moduleId];
+/******/ 		}
+/******/ 	
+/******/ 		// Return the exports of the module
+/******/ 		return module.exports;
+/******/ 	}
+/******/ 	
+/************************************************************************/
+/******/ 	/* webpack/runtime/compat */
+/******/ 	
+/******/ 	if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/";
+/******/ 	
+/************************************************************************/
+/******/ 	
+/******/ 	// startup
+/******/ 	// Load entry module and return exports
+/******/ 	// This entry module is referenced by other modules so it can't be inlined
+/******/ 	var __webpack_exports__ = __nccwpck_require__(9407);
+/******/ 	module.exports = __webpack_exports__;
+/******/ 	
+/******/ })()
+;
\ No newline at end of file
diff --git a/list-changed-modules/action.yaml b/list-changed-modules/action.yaml
index b27e05929..a18288e7d 100644
--- a/list-changed-modules/action.yaml
+++ b/list-changed-modules/action.yaml
@@ -25,7 +25,7 @@ runs:
     - run: |
         echo '::warning::This action was deprecated. Please use suzuki-shunsuke/tfaction/js action.'
       shell: bash
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: main
       with:
         action: list-changed-modules
diff --git a/list-module-callers/action.yaml b/list-module-callers/action.yaml
index 59517f756..2d91c9d05 100644
--- a/list-module-callers/action.yaml
+++ b/list-module-callers/action.yaml
@@ -21,7 +21,7 @@ runs:
     - run: |
         echo '::warning::This action was deprecated. Please use suzuki-shunsuke/tfaction/js action.'
       shell: bash
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: main
       with:
         action: list-module-callers
diff --git a/list-targets-with-changed-files/action.yaml b/list-targets-with-changed-files/action.yaml
index f93d1fae9..b1fda566c 100644
--- a/list-targets-with-changed-files/action.yaml
+++ b/list-targets-with-changed-files/action.yaml
@@ -36,7 +36,7 @@ runs:
     - run: |
         echo '::warning::This action was deprecated. Please use suzuki-shunsuke/tfaction/js action.'
       shell: bash
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: main
       with:
         action: list-targets-with-changed-files
diff --git a/list-targets/action.yaml b/list-targets/action.yaml
index e257540af..12cc10896 100644
--- a/list-targets/action.yaml
+++ b/list-targets/action.yaml
@@ -18,7 +18,7 @@ outputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/install@main
+    - uses: suzuki-shunsuke/tfaction/install@v1.14.0
       id: install
     - run: github-comment exec -- ci-info run | sed "s/^export //" >> "$GITHUB_ENV"
       shell: bash
@@ -41,20 +41,20 @@ runs:
       env:
         HEAD_SHA: ${{github.event.pull_request.head.sha}}
 
-    - uses: suzuki-shunsuke/tfaction/list-working-dirs@main
+    - uses: suzuki-shunsuke/tfaction/list-working-dirs@v1.14.0
       id: list-working-directory-configs
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: list-module-callers
       if: steps.global-config.outputs.update_local_path_module_caller == 'true'
       with:
         action: list-module-callers
         config_files: ${{ steps.list-working-directory-configs.outputs.file }}
         module_files: ${{ steps.list-working-directory-configs.outputs.module_file }}
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: list-targets
       with:
         action: list-targets-with-changed-files
diff --git a/list-working-dirs/action.yaml b/list-working-dirs/action.yaml
index 59f448191..cb73052ff 100644
--- a/list-working-dirs/action.yaml
+++ b/list-working-dirs/action.yaml
@@ -10,7 +10,7 @@ outputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
diff --git a/plan/action.yaml b/plan/action.yaml
index 7a14668c4..655eec341 100644
--- a/plan/action.yaml
+++ b/plan/action.yaml
@@ -8,12 +8,12 @@ inputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/terraform-plan@main
+    - uses: suzuki-shunsuke/tfaction/terraform-plan@v1.14.0
       if: env.TFACTION_JOB_TYPE == 'terraform'
       with:
         github_token: ${{inputs.github_token}}
 
-    - uses: suzuki-shunsuke/tfaction/tfmigrate-plan@main
+    - uses: suzuki-shunsuke/tfaction/tfmigrate-plan@v1.14.0
       if: env.TFACTION_JOB_TYPE == 'tfmigrate'
       with:
         github_token: ${{inputs.github_token}}
diff --git a/scaffold-module/action.yaml b/scaffold-module/action.yaml
index 1e0b33b5b..86f674bf3 100644
--- a/scaffold-module/action.yaml
+++ b/scaffold-module/action.yaml
@@ -42,7 +42,7 @@ runs:
         fi
       shell: bash
 
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
diff --git a/scaffold-tfmigrate/action.yaml b/scaffold-tfmigrate/action.yaml
index df3211081..5c751bb0c 100644
--- a/scaffold-tfmigrate/action.yaml
+++ b/scaffold-tfmigrate/action.yaml
@@ -29,11 +29,11 @@ outputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: target-config
       with:
         action: get-target-config
diff --git a/scaffold-working-dir/action.yaml b/scaffold-working-dir/action.yaml
index e127b2ac7..6bd354d49 100644
--- a/scaffold-working-dir/action.yaml
+++ b/scaffold-working-dir/action.yaml
@@ -15,11 +15,11 @@ outputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: target-config
       with:
         action: get-target-config
diff --git a/set-drift-env/action.yaml b/set-drift-env/action.yaml
index a4f1b916d..5c7424b1d 100644
--- a/set-drift-env/action.yaml
+++ b/set-drift-env/action.yaml
@@ -7,7 +7,7 @@ inputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
diff --git a/setup/action.yaml b/setup/action.yaml
index e9c4dfe9e..38fd38aea 100644
--- a/setup/action.yaml
+++ b/setup/action.yaml
@@ -23,7 +23,7 @@ outputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/install@main
+    - uses: suzuki-shunsuke/tfaction/install@v1.14.0
       id: install
     - run: github-comment exec -- ci-info run | sed "s/^export //" >> "$GITHUB_ENV"
       shell: bash
@@ -46,7 +46,7 @@ runs:
       env:
         HEAD_SHA: ${{github.event.pull_request.head.sha}}
 
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: target-config
       with:
         action: get-target-config
@@ -62,7 +62,7 @@ runs:
       env:
         GITHUB_TOKEN: ${{ inputs.github_token }}
 
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
@@ -99,7 +99,7 @@ runs:
         role-session-name: ${{ steps.target-config.outputs.aws_role_session_name }}
         aws-region: ${{ steps.target-config.outputs.aws_region }}
 
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       with:
         action: export-aws-secrets-manager
 
@@ -127,7 +127,7 @@ runs:
     - uses: google-github-actions/setup-gcloud@6189d56e4096ee891640bb02ac264be376592d6a # v2.1.2
       if: steps.target-config.outputs.gcp_service_account != ''
 
-    - uses: suzuki-shunsuke/tfaction/deploy-ssh-key@main
+    - uses: suzuki-shunsuke/tfaction/deploy-ssh-key@v1.14.0
       if: inputs.ssh_key != ''
       with:
         ssh_key: ${{ inputs.ssh_key }}
diff --git a/terraform-apply/action.yaml b/terraform-apply/action.yaml
index eb541f9a5..9e737ced8 100644
--- a/terraform-apply/action.yaml
+++ b/terraform-apply/action.yaml
@@ -21,15 +21,15 @@ outputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: target-config
       with:
         action: get-target-config
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: check-terraform-skip
       with:
         action: check-terraform-skip
diff --git a/terraform-plan/action.yaml b/terraform-plan/action.yaml
index b1157814a..9af6be94e 100644
--- a/terraform-plan/action.yaml
+++ b/terraform-plan/action.yaml
@@ -11,16 +11,16 @@ inputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: target-config
       with:
         action: get-target-config
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
 
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: check-terraform-skip
       if: "! env.TFACTION_DRIFT_ISSUE_NUMBER"
       with:
@@ -43,7 +43,7 @@ runs:
         CONFTEST_POLICY_DIRECTORY: ${{ steps.global-config.outputs.conftest_policy_directory }}
         TF_COMMAND: ${{ steps.target-config.outputs.terraform_command }}
 
-    - uses: suzuki-shunsuke/tfaction/conftest@main
+    - uses: suzuki-shunsuke/tfaction/conftest@v1.14.0
       if: |
         env.TFACTION_DRIFT_ISSUE_NUMBER || !fromJSON(steps.check-terraform-skip.outputs.skip_terraform)
       with:
diff --git a/test-module/action.yaml b/test-module/action.yaml
index ae4fa85ff..80b41fa38 100644
--- a/test-module/action.yaml
+++ b/test-module/action.yaml
@@ -11,14 +11,14 @@ inputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
 
     # TODO Run conftest on modules
     # https://github.com/suzuki-shunsuke/tfaction/issues/1908#issuecomment-2415572321
-    # - uses: suzuki-shunsuke/tfaction/conftest@main
+    # - uses: suzuki-shunsuke/tfaction/conftest@v1.14.0
     #   with:
     #     github_token: ${{ inputs.github_token }}
 
@@ -56,7 +56,7 @@ runs:
       shell: bash
       working-directory: ${{ env.TFACTION_TARGET }}
 
-    - uses: suzuki-shunsuke/tfaction/terraform-docs@main
+    - uses: suzuki-shunsuke/tfaction/terraform-docs@v1.14.0
       with:
         github_token: ${{ inputs.github_token }}
         working_directory: ${{ env.TFACTION_TARGET }}
diff --git a/test/action.yaml b/test/action.yaml
index b121bf17d..cfb4db170 100644
--- a/test/action.yaml
+++ b/test/action.yaml
@@ -11,11 +11,11 @@ inputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: target-config
       with:
         action: get-target-config
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
@@ -23,7 +23,7 @@ runs:
     - shell: bash
       id: action_path
       run: echo "value=$GITHUB_ACTION_PATH" >> "$GITHUB_OUTPUT"
-    - uses: suzuki-shunsuke/tfaction/conftest@main
+    - uses: suzuki-shunsuke/tfaction/conftest@v1.14.0
       with:
         github_token: ${{ inputs.github_token }}
       env:
@@ -75,7 +75,7 @@ runs:
         skip_push: ${{ github.event_name != 'pull_request' && ! startsWith(github.event_name, 'pull_request_') }}
         terraform_command: ${{ steps.target-config.outputs.terraform_command }}
 
-    - uses: suzuki-shunsuke/tfaction/terraform-docs@main
+    - uses: suzuki-shunsuke/tfaction/terraform-docs@v1.14.0
       if: fromJSON(steps.target-config.outputs.enable_terraform_docs) && steps.target-config.outputs.destroy != 'true'
       with:
         github_token: ${{ inputs.github_token }}
diff --git a/tfmigrate-apply/action.yaml b/tfmigrate-apply/action.yaml
index 25a9c2f0a..24d16283a 100644
--- a/tfmigrate-apply/action.yaml
+++ b/tfmigrate-apply/action.yaml
@@ -12,11 +12,11 @@ inputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: target-config
       with:
         action: get-target-config
diff --git a/tfmigrate-plan/action.yaml b/tfmigrate-plan/action.yaml
index cc7e8d71a..15cc6f34e 100644
--- a/tfmigrate-plan/action.yaml
+++ b/tfmigrate-plan/action.yaml
@@ -11,11 +11,11 @@ inputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: target-config
       with:
         action: get-target-config
@@ -31,7 +31,7 @@ runs:
         GCS_BUCKET_NAME_TFMIGRATE_HISTORY: ${{ steps.target-config.outputs.gcs_bucket_name_tfmigrate_history }}
         CONFTEST_POLICY_DIRECTORY: ${{ steps.global-config.outputs.conftest_policy_directory }}
         TF_COMMAND: ${{ steps.global-config.outputs.terraform_command }}
-    - uses: suzuki-shunsuke/tfaction/conftest@main
+    - uses: suzuki-shunsuke/tfaction/conftest@v1.14.0
       with:
         plan: "true"
         github_token: ${{inputs.github_token}}
diff --git a/update-drift-issue/action.yaml b/update-drift-issue/action.yaml
index 062019a3a..04ccadd82 100644
--- a/update-drift-issue/action.yaml
+++ b/update-drift-issue/action.yaml
@@ -14,7 +14,7 @@ inputs:
 runs:
   using: composite
   steps:
-    - uses: suzuki-shunsuke/tfaction/js@main
+    - uses: suzuki-shunsuke/tfaction/js@v1.14.0
       id: global-config
       with:
         action: get-global-config